Skip to content

Commit

Permalink
Model bug fixes -> 0.2.4. Should now support phasing experiments (?)
Browse files Browse the repository at this point in the history
  • Loading branch information
rhfogh committed Nov 11, 2024
1 parent e60a91e commit 525007d
Show file tree
Hide file tree
Showing 4 changed files with 80 additions and 52 deletions.
8 changes: 6 additions & 2 deletions mxcubecore/HardwareObjects/Gphl/GphlWorkflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@
import subprocess
import time
from collections import OrderedDict
from uuid import uuid1

import f90nml
import gevent
Expand Down Expand Up @@ -815,7 +816,10 @@ def start_enactment(self, enactment_id:str, correlation_id:str):
workflow_parameters.get("workflow_type")
or data_model.strategy_type
)
tracking_data.location_id = workflow_parameters.get("workflow_position_id")
tracking_data.location_id = (
workflow_parameters.get("workflow_position_id")
or uuid1().hex
)
# NB first orientation only:
tracking_data.orientation_id = workflow_parameters.get(
"workflow_kappa_settings_id"
Expand Down Expand Up @@ -2028,7 +2032,7 @@ def collect_data(self, payload, correlation_id):
data_collection.workflow_parameters = new_workflow_parameters
tracking_data = data_collection.tracking_data
tracking_data.uuid = scan.id_
tracking_data.workflow_name = wf_tracking_data.experiment_strategy
tracking_data.workflow_name = wf_tracking_data.workflow_name
tracking_data.workflow_type = wf_tracking_data.workflow_type
tracking_data.workflow_uid = wf_tracking_data.uuid
tracking_data.location_id = wf_tracking_data.location_id
Expand Down
4 changes: 2 additions & 2 deletions mxcubecore/configuration/mockup/gphl/gphl-setup.yml
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,6 @@ software_properties:

# OPTIONAL. simcal *binary* For Mock collection emulation only. Not used by workflow
co.gphl.wf.simcal.bin:
/alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg/autoPROC/bin/linux64/simcal
/alt/rhfogh/Software/GPhL/nightly_20241108/Files_workflow_TRUNK_alpha-bdg/autoPROC/bin/linux64/simcal
co.gphl.wf.simcal.bdg_licence_dir:
/alt/rhfogh/Software/GPhL/nightly_20240611/Files_workflow_TRUNK_alpha-bdg
/alt/rhfogh/Software/GPhL/nightly_20241108/Files_workflow_TRUNK_alpha-bdg
2 changes: 1 addition & 1 deletion mxcubecore/queue_entry/data_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,7 @@ def post_execute(self):
data_model = self.get_data_model()
# This would be a good place to check that scan_pos_end matches input parameters
# There have been tricky bugs found where this was not the case
mxutils.add_sweep(
mxutils.add_data_collection(
self.get_mxlims_record(),
data_model,
beam_position=beam_position,
Expand Down
118 changes: 71 additions & 47 deletions mxcubecore/utils/mxlims.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,9 +34,7 @@
from mxlims.pydantic import core


def create_mxexperiment(
datamodel: qmo.TaskNode, **parameters
) -> mxmodel.MXExperiment:
def create_mxexperiment(datamodel: qmo.TaskNode, **parameters) -> mxmodel.MXExperiment:
"""Create MXExperiment mxlims record from datamodel
Args:
Expand All @@ -59,7 +57,12 @@ def create_mxexperiment(
workflow_name = diffraction_plan.experimentType
else:
workflow_name = diffraction_plan.get("experimentType")
workflow_name = workflow_name or datamodel.experiment_type
if not workflow_name:
try:
workflow_name = datamodel.experiment_type
except AttributeError:
workflow_name = None
initpars["experiment_strategy"] = workflow_name

if diffraction_plan:
# It is not clear if diffraction_plan is a dict or an object,
Expand Down Expand Up @@ -142,17 +145,16 @@ def create_mxexperiment(
return result


def add_sweep(
def add_data_collection(
mxexperiment: mxmodel.MXExperiment,
sweep: qmo.DataCollection,
data_collection: qmo.DataCollection,
**parameters: dict,
) -> None:
"""
Args:
mxexperiment: container MXExperiment
sweep: DataCollection queue_model_object to add
uuid: String containing globally unique identifier
data_collection: DataCollection queue_model_object to add
**parameters: dict of parameters overriding/supplementing datamodel
Returns:
Expand All @@ -163,64 +165,86 @@ def add_sweep(
    # Always true in MXCuBE
SCAN_AXIS = "omega"

acquisition = sweep.acquisitions[0]
acquisition = data_collection.acquisitions[0]
path_template = acquisition.path_template
acqparams = acquisition.acquisition_parameters

sweep_params = {
"source_ref": mxmodel.MXExperimentRef(target_uuid=mxexperiment.uuid),
"scan_axis": SCAN_AXIS,
"exposure_time": acqparams.exp_time,
"image_width": acqparams.osc_range,
"energy": acqparams.energy,
"transmission": acqparams.transmission,
"resolution": acqparams.resolution,
"detector_binning_mode": acqparams.detector_binning_mode,
"detector_roi_mode": acqparams.detector_roi_mode,
"overlap": acqparams.overlap,
"number_triggers": acqparams.num_triggers,
"number_images_per_trigger": acqparams.num_images_per_trigger,
"prefix": path_template.get_prefix(),
"file_type": path_template.suffix,
"filename_template": path_template.get_image_file_name(),
"path": path_template.directory,
}

sweep_params["axis_positions_start"] = startpos = dict(
tracking_data = data_collection.tracking_data
startpos = dict(
tpl
for tpl in acqparams.centred_position.as_dict().items()
if tpl[1] is not None
)
startpos[SCAN_AXIS] = acqparams.osc_start
axis_pos_start = acqparams.osc_start
axis_pos_end = axis_pos_start + acqparams.num_images * acqparams.osc_range
startpos[SCAN_AXIS] = axis_pos_start
startpos["detector_distance"] = acqparams.detector_distance

detector_distance = parameters.pop("detector_distance", None)
if detector_distance is not None:
startpos["detector_distance"] = detector_distance
scan = mxmodel.Scan(
scan_position_start=startpos[SCAN_AXIS],
scan_position_start=axis_pos_start,
first_image_number=acqparams.first_image,
number_images=acqparams.num_images,
ordinal=1,
ordinal=tracking_data.scan_number or 0,
)
sweep_params["scans"] = [scan]
scan_pos_end = parameters.pop("scan_position_end", None)
sweep_params["axis_positions_end"] = {SCAN_AXIS: scan_pos_end}

# NBNB interleaving, split sweeps, split characterisation
    # NBNB check final omega value against start
# NBNB how do we get the detector type?
# NBNB do we use MXCuBE axis names or standardised names?
# detector_type, ,, ,
# , axis_positions_end,
# NBNB change from QMO to dict input
sweep_id = tracking_data.sweep_id
sweep = None
for dataset in mxexperiment.results:
if dataset.uuid == sweep_id:
sweep = dataset
break
if sweep:
        # This is a scan for an existing sweep. Add and update
sweep.scans.append(scan)
sweep.axis_positions_start[SCAN_AXIS] = min(
sweep.axis_positions_start[SCAN_AXIS], axis_pos_start
)
sweep.axis_positions_end[SCAN_AXIS] = max(
sweep.axis_positions_end[SCAN_AXIS], axis_pos_end
)

else:
sweep_params = {
"source_ref": mxmodel.MXExperimentRef(
target_uuid=tracking_data.workflow_uid
),
"role": tracking_data.role,
"logistical_sample_ref": core.LogisticalSampleRef(
target_uuid=tracking_data.location_id
),
"scan_axis": SCAN_AXIS,
"exposure_time": acqparams.exp_time,
"image_width": acqparams.osc_range,
"energy": acqparams.energy,
"transmission": acqparams.transmission,
"resolution": acqparams.resolution,
"detector_binning_mode": acqparams.detector_binning_mode,
"detector_roi_mode": acqparams.detector_roi_mode,
"overlap": acqparams.overlap,
"number_triggers": acqparams.num_triggers,
"number_images_per_trigger": acqparams.num_images_per_trigger,
"prefix": path_template.get_prefix(),
"file_type": path_template.suffix,
"filename_template": path_template.get_image_file_name(),
"path": path_template.directory,
"axis_positions_start": startpos,
"scans": [scan],
}

scan_pos_end = parameters.pop("scan_position_end", None)
sweep_params["axis_positions_end"] = {SCAN_AXIS: scan_pos_end}

        # NBNB check final omega value against start
# NBNB how do we get the detector type?
# NBNB do we use MXCuBE axis names or standardised names?

sweep_params.update(parameters)
mxexperiment.results.append(mxmodel.CollectionSweep(**sweep_params))
sweep_params.update(parameters)
mxexperiment.results.append(mxmodel.CollectionSweep(**sweep_params))


def export_mxexperiment(
mxexperiment: mxmodel.MXExperiment, path_template: Optional[qmo.PathTemplate]=None
mxexperiment: mxmodel.MXExperiment, path_template: Optional[qmo.PathTemplate] = None
):
"""Export MXExperiment mxlims record to JSON file"""
if path_template is None:
Expand Down

0 comments on commit 525007d

Please sign in to comment.