scipp update, dataset to datagroup
andped10 committed Jan 9, 2024
1 parent b0f9b5e commit 12cbb4a
Showing 11 changed files with 311 additions and 253 deletions.
60 changes: 34 additions & 26 deletions EasyReflectometry/data.py
@@ -6,60 +6,66 @@
from orsopy.fileio import orso, Header


def load(fname: Union[TextIO, str]) -> sc.Dataset:
def load(fname: Union[TextIO, str]) -> sc.DataGroup:
"""
Load data from an ORSO .ort file.
:param fname: The file to be read.
:return: A scipp Dataset for the loaded datasets.
:return: A scipp DataGroup for the loaded datasets.
"""
try:
return _load_orso(fname)
except (IndexError, ValueError):
return _load_txt(fname)


def _load_orso(fname: Union[TextIO, str]) -> sc.Dataset:
def _load_orso(fname: Union[TextIO, str]) -> sc.DataGroup:
"""
Load from an ORSO compatible file.
:param fname: The path for the file to be read.
:return: A populated scipp dataset.
:return: A populated scipp DataGroup.
"""
data = {}
coords = {}
attrs = {}
f_data = orso.load_orso(fname)
for i, o in enumerate(f_data):
name = i
if o.info.data_set is not None:
name = o.info.data_set
coords = {}
coords[f'Qz_{name}'] = sc.array(dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 0],
variances=np.square(o.data[:, 3]),
unit=sc.Unit(o.info.columns[0].unit))
coords[f'Qz_{name}'] = sc.array(
dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 0],
variances=np.square(o.data[:, 3]),
unit=sc.Unit(o.info.columns[0].unit)
)
try:
ordinate = sc.array(dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 1],
variances=np.square(o.data[:, 2]),
unit=sc.Unit(o.info.columns[1].unit))
data[f'R_{name}'] = sc.array(
dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 1],
variances=np.square(o.data[:, 2]),
unit=sc.Unit(o.info.columns[1].unit)
)
except TypeError:
ordinate = sc.array(dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 1],
variances=np.square(o.data[:, 2]))
attrs = {'orso_header': sc.scalar(Header.asdict(o.info))}
data[f'R_{name}'] = sc.DataArray(data=ordinate, coords=coords, attrs=attrs)
return sc.Dataset(data=data)
data[f'R_{name}'] = sc.array(
dims=[f'{o.info.columns[0].name}_{name}'],
values=o.data[:, 1],
variances=np.square(o.data[:, 2])
)
attrs[f'R_{name}'] = {'orso_header': sc.scalar(Header.asdict(o.info))}
return sc.DataGroup(data=data, coords=coords, attrs=attrs)


def _load_txt(fname: Union[TextIO, str]) -> sc.Dataset:
def _load_txt(fname: Union[TextIO, str]) -> sc.DataGroup:
"""
Load data from a simple txt file.
:param fname: The path for the file to be read.
:return: A populated scipp dataset.
:return: A populated scipp DataGroup.
"""
f_data = np.loadtxt(fname)
data = {
@@ -68,9 +74,11 @@ def _load_txt(fname: Union[TextIO, str]) -> sc.Dataset:
}
coords = {
data['R_0'].dims[0]:
sc.array(dims=['Qz_0'],
values=f_data[:, 0],
variances=np.square(f_data[:, 3]),
unit=sc.Unit('1/angstrom'))
sc.array(
dims=['Qz_0'],
values=f_data[:, 0],
variances=np.square(f_data[:, 3]),
unit=sc.Unit('1/angstrom')
)
}
return sc.Dataset(data=data, coords=coords)
return sc.DataGroup(data=data, coords=coords)
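
For orientation, a minimal sketch of how the DataGroup returned by the updated load function is laid out, following the constructors above. The file name is hypothetical, and the suffix 0 stands for the first (or only) dataset; ORSO files with named datasets use the dataset name instead, and plain txt files carry no 'attrs' entry.

import scipp as sc

from EasyReflectometry.data import load

# Hypothetical file name; any ORSO .ort file with one or more datasets will do.
data_group = load('example.ort')

# The DataGroup nests three plain mappings, mirroring the constructor above,
# instead of the single flat sc.Dataset returned previously.
reflectivity = data_group['data']['R_0']             # sc.Variable with values and variances
q = data_group['coords']['Qz_0']                     # sc.Variable carrying the Qz unit
header = data_group['attrs']['R_0']['orso_header']   # sc.scalar wrapping the ORSO header dict

print(q.unit, reflectivity.values[:5])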
42 changes: 24 additions & 18 deletions EasyReflectometry/fitting.py
@@ -13,7 +13,7 @@ class Fitter:
def __init__(self, *args: Model):
"""
A convenience class for the :py:class:`easyCore.Fitting.Fitting`
which will populate the :py:class:`sc.Dataset` appropriately
which will populate the :py:class:`sc.DataGroup` appropriately
after the fitting is performed.
:param model: Reflectometry model
@@ -32,35 +32,41 @@ def wrapped(*args, **kwargs):
self._models = args
self.easy_f = easyFitter(args, self._fit_func)

def fit(self, data: sc.Dataset, method: str = 'least_squares', id=0):
def fit(self, data: sc.DataGroup, method: str = 'least_squares', id: int = 0) -> sc.DataGroup:
"""
Perform the fitting and populate the datasets with the result.
Perform the fitting and populate the DataGroups with the result.
:param data: Dataset to be fitted to and populated
:param data: DataGroup to be fitted to and populated
:param method: Optimisation method
"""
refl_nums = [k[3:] for k, v in data.coords.items() if 'Qz' == k[:2]]
x = [data.coords[f'Qz_{i}'].values for i in refl_nums]
y = [data[f'R_{i}'].data.values for i in refl_nums]
dy = [1 / np.sqrt(data[f'R_{i}'].data.variances) for i in refl_nums]
refl_nums = [k[3:] for k in data['coords'].keys() if 'Qz' == k[:2]]
x = [data['coords'][f'Qz_{i}'].values for i in refl_nums]
y = [data['data'][f'R_{i}'].values for i in refl_nums]
dy = [1 / np.sqrt(data['data'][f'R_{i}'].variances) for i in refl_nums]
result = self.easy_f.fit(x, y, weights=dy, method=method)
new_data = data.copy()
for i, _ in enumerate(result):
id = refl_nums[i]
new_data[f'R_{id}_model'] = sc.array(dims=[f'Qz_{id}'],
values=self._fit_func[i](
data.coords[f'Qz_{id}'].values))
new_data[f'R_{id}_model'] = sc.array(
dims=[f'Qz_{id}'],
values=self._fit_func[i](
data['coords'][f'Qz_{id}'].values)
)
sld_profile = self.easy_f._fit_objects[i].interface.sld_profile(
self._models[i].uid)
new_data[f'SLD_{id}'] = sc.array(dims=[f'z_{id}'],
values=sld_profile[1] * 1e-6,
unit=sc.Unit('1/angstrom')**2)
new_data[f'R_{id}_model'].attrs['model'] = sc.scalar(
self._models[i].as_dict())
new_data.coords[f'z_{id}'] = sc.array(
new_data[f'SLD_{id}'] = sc.array(
dims=[f'z_{id}'],
values=sld_profile[1] * 1e-6,
unit=sc.Unit('1/angstrom')**2
)
new_data['attrs'][f'R_{id}_model'] = {
'model' : sc.scalar(self._models[i].as_dict())
}
new_data['coords'][f'z_{id}'] = sc.array(
dims=[f'z_{id}'],
values=sld_profile[0],
unit=(1 / new_data.coords[f'Qz_{id}'].unit).unit)
unit=(1 / new_data['coords'][f'Qz_{id}'].unit).unit
)
return new_data


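A minimal usage sketch of the updated fit signature, following the population code in fit() above; the model construction is elided, and the dataset suffix 0 is assumed as in the data.py sketch.

from EasyReflectometry.data import load
from EasyReflectometry.fitting import Fitter

data_group = load('example.ort')   # hypothetical file, as above
fitter = Fitter(model)             # 'model' is an EasyReflectometry Model built elsewhere

fitted = fitter.fit(data_group)

# Following the assignments in fit() above, the results land in three places:
model_curve = fitted['R_0_model']                    # modelled reflectivity on the Qz_0 grid
sld_profile = fitted['SLD_0']                        # scattering-length density profile
z = fitted['coords']['z_0']                          # depth axis for the SLD profile
model_meta = fitted['attrs']['R_0_model']['model']   # sc.scalar of the model serialised as a dict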
2 changes: 1 addition & 1 deletion EasyReflectometry/measurement/data.py
@@ -5,5 +5,5 @@
from orsopy.fileio import orso


def load(fname: Union[TextIO, str]) -> sc.Dataset:
def load(fname: Union[TextIO, str]) -> sc.DataGroup:
return orso.load_orso(fname)
21 changes: 15 additions & 6 deletions EasyReflectometry/plot.py
@@ -7,11 +7,11 @@
color_cycle = plt.rcParams['axes.prop_cycle'].by_key()['color']


def plot(data: sc.Dataset):
def plot(data: sc.DataGroup) -> None:
"""
A general plotting function for EasyReflectometry.
:param data: the Dataset to be plotted.
:param data: the DataGroup to be plotted.
:returns: The plot canvas.
"""
@@ -25,9 +25,12 @@ def plot(data: sc.Dataset):
gs = GridSpec(2, 1, figure=fig)
ax2 = fig.add_subplot(gs[1, 0])
ax1 = fig.add_subplot(gs[0, 0])
refl_nums = [k[3:] for k, v in data.coords.items() if 'Qz' == k[:2]]
refl_nums = [k[3:] for k in data['coords'].keys() if 'Qz' == k[:2]]
for i, refl_num in enumerate(refl_nums):
copy = data[f'R_{refl_num}'].copy()
copy = sc.DataArray(
data=data['data'][f'R_{refl_num}'],
coords={f'Qz_{refl_num}': data['coords'][f'Qz_{refl_num}']}
)
copy.data *= sc.scalar(10.**i, unit=copy.unit)
copy.coords[f'Qz_{refl_num}'].variances = None
sc.plot(
@@ -39,7 +42,10 @@
color=color_cycle[i]
)
try:
copy = data[f'R_{refl_num}_model'].copy()
copy = sc.DataArray(
data=data['data'][f'R_{refl_num}_model'],
coords={f'Qz_{refl_num}': data['coords'][f'Qz_{refl_num}']}
)
copy.data *= sc.scalar(10.**float(i))
copy.coords[f'Qz_{refl_num}'].variances = None
sc.plot(
@@ -58,7 +64,10 @@

if plot_sld:
for i, refl_num in enumerate(refl_nums):
copy = data[f'SLD_{refl_num}'].copy()
copy = sc.DataArray(
data=data[f'SLD_{refl_num}'],
coords={f'z_{refl_num}': data['coords'][f'z_{refl_num}']}
)
copy.data += sc.scalar(10. * i, unit=copy.unit)
sc.plot(
data[f'SLD_{refl_num}'],
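The pattern introduced in plot.py, extracted on its own: because a DataGroup keeps data and coordinates in separate mappings, a temporary sc.DataArray is assembled before handing each curve to sc.plot. A minimal sketch, assuming a DataGroup produced by load() or Fitter.fit() with the dataset suffix 0.

import scipp as sc

curve = sc.DataArray(
    data=data_group['data']['R_0'],
    coords={'Qz_0': data_group['coords']['Qz_0'].copy()},  # copy so the group itself is not mutated
)
curve.coords['Qz_0'].variances = None  # drop coordinate variances before plotting, as in the code above
sc.plot(curve, norm='log')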
197 changes: 174 additions & 23 deletions docs/monolayer.ipynb

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions docs/multi_contrast.ipynb
@@ -315,9 +315,9 @@
"d13d2o_structure = Structure.from_pars(air_layer, d13d2o, d2o_layer)\n",
"d70d2o_structure = Structure.from_pars(air_layer, d70d2o, d2o_layer)\n",
"d83acmw_structure = Structure.from_pars(air_layer, d83acmw, acmw_layer)\n",
"d13d2o_model = Model.from_pars(d13d2o_structure, 0.1, data['R_d13DSPC-D2O'].data.values.min(), 5)\n",
"d70d2o_model = Model.from_pars(d70d2o_structure, 0.1, data['R_d70DSPC-D2O'].data.values.min(), 5)\n",
"d83acmw_model = Model.from_pars(d83acmw_structure, 0.1, data['R_d83DSPC-ACMW'].data.values.min(), 5)"
"d13d2o_model = Model.from_pars(d13d2o_structure, 0.1, data['data']['R_d13DSPC-D2O'].values.min(), 5)\n",
"d70d2o_model = Model.from_pars(d70d2o_structure, 0.1, data['data']['R_d70DSPC-D2O'].values.min(), 5)\n",
"d83acmw_model = Model.from_pars(d83acmw_structure, 0.1, data['data']['R_d83DSPC-ACMW'].values.min(), 5)"
]
},
{
@@ -344,7 +344,7 @@
"d70d2o_model.scale.bounds = (0.05, 1.5)\n",
"d70d2o_model.background.bounds = (4e-8, 1e-5)\n",
"d83acmw_model.scale.bounds = (0.05, 1.5)\n",
"d83acmw_model.background.bounds = (4e-8, 1e-5)\n",
"d83acmw_model.background.bounds = (4e-8, 1e-4)\n",
"\n",
"d13d2o.layers[0].area_per_molecule.bounds = (40, 50)\n",
"d13d2o.layers[1].solvation.bounds = (0.2, 0.6)\n",
@@ -448,7 +448,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
"version": "3.11.7"
}
},
"nbformat": 4,
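The notebook edits above are the same access-pattern change as seen by users; a brief before/after sketch, using the dataset name from the cell above.

# Before (sc.Dataset): entries were DataArrays, so .data was needed to reach the values
# background = data['R_d13DSPC-D2O'].data.values.min()

# After (sc.DataGroup): entries under 'data' are plain Variables
background = data['data']['R_d13DSPC-D2O'].values.min()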
2 changes: 1 addition & 1 deletion docs/repeating.ipynb
@@ -321,7 +321,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
"version": "3.11.7"
}
},
"nbformat": 4,
2 changes: 1 addition & 1 deletion docs/simple_fitting.ipynb
@@ -519,7 +519,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
"version": "3.11.7"
}
},
"nbformat": 4,