From 43278cc41c45bc9583aef9735ca88266312aff3b Mon Sep 17 00:00:00 2001
From: Aliaksandr Yakutovich
Date: Tue, 17 Dec 2024 15:09:02 +0000
Subject: [PATCH 1/3] Apply scaling factor when reading the cube files

---
 cubehandler/cube.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/cubehandler/cube.py b/cubehandler/cube.py
index f16c31e..cd72aec 100644
--- a/cubehandler/cube.py
+++ b/cubehandler/cube.py
@@ -75,7 +75,7 @@ def __init__(
         self.cell_n = cell_n
 
     @classmethod
-    def from_file_handle(cls, filehandle, read_data=True):
+    def from_file_handle(cls, filehandle, read_data=True, apply_scaling=True):
         f = filehandle
         c = cls()
         c.title = f.readline().rstrip()
@@ -131,18 +131,20 @@ def from_file_handle(cls, filehandle, read_data=True):
             # data = np.array(f.read().split(), dtype=float)
             c.data = c.data.reshape(c.cell_n)
 
+        if apply_scaling:
+            c.data *= c.scaling_factor
         return c
 
     @classmethod
-    def from_file(cls, filepath, read_data=True):
+    def from_file(cls, filepath, read_data=True, apply_scaling=True):
         with open(filepath) as f:
-            c = cls.from_file_handle(f, read_data=read_data)
+            c = cls.from_file_handle(f, read_data=read_data, apply_scaling=apply_scaling)
         return c
 
     @classmethod
-    def from_content(cls, content, read_data=True):
-        return cls.from_file_handle(io.StringIO(content), read_data=read_data)
+    def from_content(cls, content, read_data=True, apply_scaling=True):
+        return cls.from_file_handle(io.StringIO(content), read_data=read_data, apply_scaling=apply_scaling)
 
     def write_cube_file(self, filename, low_precision=False):

From f182ae69a124a61086e32781d1f195d76e79835a Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 17 Dec 2024 15:10:21 +0000
Subject: [PATCH 2/3] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 cubehandler/cube.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/cubehandler/cube.py b/cubehandler/cube.py
index cd72aec..a231783 100644
--- a/cubehandler/cube.py
+++ b/cubehandler/cube.py
@@ -139,12 +139,16 @@ def from_file_handle(cls, filehandle, read_data=True, apply_scaling=True):
     @classmethod
     def from_file(cls, filepath, read_data=True, apply_scaling=True):
         with open(filepath) as f:
-            c = cls.from_file_handle(f, read_data=read_data, apply_scaling=apply_scaling)
+            c = cls.from_file_handle(
+                f, read_data=read_data, apply_scaling=apply_scaling
+            )
         return c
 
     @classmethod
     def from_content(cls, content, read_data=True, apply_scaling=True):
-        return cls.from_file_handle(io.StringIO(content), read_data=read_data, apply_scaling=apply_scaling)
+        return cls.from_file_handle(
+            io.StringIO(content), read_data=read_data, apply_scaling=apply_scaling
+        )
 
     def write_cube_file(self, filename, low_precision=False):

From fd9b9e6b645208ad75144be9d5feba8a5bf7153f Mon Sep 17 00:00:00 2001
From: Aliaksandr Yakutovich
Date: Tue, 17 Dec 2024 15:14:48 +0000
Subject: [PATCH 3/3] Fix test_reduce_data_density

---
 tests/test_cube.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_cube.py b/tests/test_cube.py
index 34155ae..162e638 100644
--- a/tests/test_cube.py
+++ b/tests/test_cube.py
@@ -88,6 +88,6 @@ def test_reduce_data_density():
     cube.reduce_data_density(points_per_angstrom=2)
     cube.write_cube_file("low_res.cube", low_precision=True)
     low_res = Cube.from_file("low_res.cube")
-    low_res_integral = np.sum(low_res.data**2) * low_res.dv_au * low_res.scaling_f**2
+    low_res_integral = np.sum(low_res.data**2) * low_res.dv_au
     assert np.abs(low_res_integral - integral) < 0.01
     assert cube.scaling_f == 0.2848452
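
Note (editorial, not part of the patch series): a minimal usage sketch of the behaviour introduced above, assuming Cube is importable from the cubehandler package and that a file named "example.cube" exists; both the file name and import path are assumptions. With the default apply_scaling=True, the readers return voxel data already multiplied by the cube's scaling factor, which is why the test in PATCH 3/3 drops the explicit scaling_f**2 correction; passing apply_scaling=False keeps the values as stored in the file.

    # Hypothetical usage sketch; file name and import path are assumptions.
    import numpy as np

    from cubehandler import Cube

    scaled = Cube.from_file("example.cube")                    # data pre-multiplied by the scaling factor
    raw = Cube.from_file("example.cube", apply_scaling=False)  # data exactly as stored in the file

    # With pre-scaled data, the squared-density integral needs no extra factor:
    integral = np.sum(scaled.data**2) * scaled.dv_au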