From c8045810b919428c6f3d3826dcb9675265b3286a Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Thu, 9 Apr 2026 14:44:22 +0100 Subject: [PATCH 01/19] Renamed 'self._milling' to 'self._milling_images' and 'MillingProgress' to 'MillingImage' --- src/murfey/client/contexts/fib.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index d1cb43fa..9ba18136 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -24,7 +24,7 @@ class Lamella(NamedTuple): angle: float | None = None -class MillingProgress(NamedTuple): +class MillingImage(NamedTuple): file: Path timestamp: float @@ -84,8 +84,8 @@ def __init__( super().__init__("FIB", acquisition_software, token) self._basepath = basepath self._machine_config = machine_config - self._milling: dict[int, list[MillingProgress]] = {} self._lamellae: dict[int, Lamella] = {} + self._milling_images: dict[int, list[MillingImage]] = {} def post_transfer( self, @@ -139,16 +139,16 @@ def post_transfer( f"File {transferred_file.name!r} not found on storage system" ) return - if not self._milling.get(lamella_number): - self._milling[lamella_number] = [ - MillingProgress( + if not self._milling_images.get(lamella_number): + self._milling_images[lamella_number] = [ + MillingImage( timestamp=timestamp, file=destination_file, ) ] else: - self._milling[lamella_number].append( - MillingProgress( + self._milling_images[lamella_number].append( + MillingImage( timestamp=timestamp, file=destination_file, ) @@ -156,7 +156,7 @@ def post_transfer( gif_list = [ l.file for l in sorted( - self._milling[lamella_number], key=lambda x: x.timestamp + self._milling_images[lamella_number], key=lambda x: x.timestamp ) ] raw_directory = Path( From 4c59eb3bd12bce29230ec75a84c3a7b452a76df5 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Thu, 16 Apr 2026 18:15:59 +0100 Subject: [PATCH 02/19] Added the helper functions, classes, and 
mapping variables needed to parse and extract FIB AutoTEM metadata for posting --- src/murfey/client/contexts/fib.py | 477 +++++++++++++++++++++++++++--- 1 file changed, 440 insertions(+), 37 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index bf67ecc9..57970492 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -3,11 +3,13 @@ import logging import re import threading +import xml.etree.ElementTree as ET +from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import NamedTuple +from typing import Callable, TypeVar -import xmltodict +from pydantic import BaseModel, computed_field from murfey.client.context import Context from murfey.client.instance_environment import MurfeyInstanceEnvironment @@ -18,13 +20,192 @@ lock = threading.Lock() -class Lamella(NamedTuple): - name: str - number: int - angle: float | None = None +class StagePositionValues(BaseModel): + # Coordinates are in metres + x: float | None = None + y: float | None = None + z: float | None = None + # Angles are in degrees + rotation: float | None = None + tilt_alpha: float | None = None + @computed_field + def slot_number(self) -> int | None: + if self.x is None: + return None + return 1 if self.x < 0 else 2 -class MillingImage(NamedTuple): + +class StagePositionInfo(BaseModel): + thinning: StagePositionValues | None = None + chunk_coincidence: StagePositionValues | None = None + thinning_stage: StagePositionValues | None = None + preparation: StagePositionValues | None = None + chunk_site: StagePositionValues | None = None + + +class WidthOverlap(BaseModel): + front_left: float | None = None + front_right: float | None = None + rear_left: float | None = None + rear_right: float | None = None + + +class MillingStepInfo(BaseModel): + """ + These are the parameters configured per milling step that we are interested + in tracking. 
Some attributes are present only for certain steps. + """ + + # Step setup + enabled: bool | None = None + status: str | None = None + execution_time: float | None = None + + # Beam info + beam_type: str | None = None + voltage: float | None = None + current: float | None = None + + # Lamella dimensions + + # Milling info + milling_angle: float | None = None + depth_correction: float | None = None + lamella_offset: float | None = None + trench_height_front: float | None = None + trench_height_rear: float | None = None + width_overlap_front_left: float | None = None + width_overlap_front_right: float | None = None + width_overlap_rear_left: float | None = None + width_overlap_rear_right: float | None = None + + +class LamellaSiteInfo(BaseModel): + """ + These parameters are not associated with a single milling step + """ + + site_name: str | None = None + stage_info: StagePositionInfo | None = None + + """ + These are all the possible processing steps found in the ProjectData.dat file. + It contains information on whether the step has been enabled as well as its + current staate. 
+ """ + # Preparation stage + eucentric_tilt: MillingStepInfo | None = None + artificial_features: MillingStepInfo | None = None + milling_angle: MillingStepInfo | None = None + image_acquisition: MillingStepInfo | None = None + lamella_placement: MillingStepInfo | None = None + # Milling stage + delay_1: MillingStepInfo | None = None + reference_definition_1: MillingStepInfo | None = None + reference_definition_1_electron: MillingStepInfo | None = None + stress_relief_cuts: MillingStepInfo | None = None + reference_definition_2: MillingStepInfo | None = None + rough_milling: MillingStepInfo | None = None + rough_milling_electron: MillingStepInfo | None = None + reference_definition_3: MillingStepInfo | None = None + medium_milling: MillingStepInfo | None = None + medium_milling_electron: MillingStepInfo | None = None + fine_milling: MillingStepInfo | None = None + fine_milling_electron: MillingStepInfo | None = None + finer_milling: MillingStepInfo | None = None + finer_milling_electron: MillingStepInfo | None = None + # Thinning stage + delay_2: MillingStepInfo | None = None + polishing_1: MillingStepInfo | None = None + polishing_1_electron: MillingStepInfo | None = None + polishing_2: MillingStepInfo | None = None + polishing_2_ion: MillingStepInfo | None = None + polishing_2_electron: MillingStepInfo | None = None + + +MILLING_STEP_NAMES = { + # Map unique activity name to class attribute + # Preparation stage + "Preparation - Eucentric Tilt": "eucentric_tilt", + "Preparation - Artificial Features": "artificial_features", + "Preparation - Milling Angle": "milling_angle", + "Preparation - Image Acquisition": "image_acquisition", + "Preparation - Lamella Placement": "lamella_placement", + # Milling stage + "Milling - Delay": "delay_1", + "Milling - Reference Definition": "reference_definition_1", + "Milling - Electron Reference Definition": "reference_definition_1_electron", + "Milling - Stress Relief Cuts": "stress_relief_cuts", + "Milling - Reference 
Redefinition 1": "reference_definition_2", + "Milling - Rough Milling": "rough_milling", + "Milling - Rough Milling - Electron Image": "rough_milling_electron", + "Milling - Reference Redefinition 2": "reference_definition_3", + "Milling - Medium Milling": "medium_milling", + "Milling - Medium Milling - Electron Image": "medium_milling_electron", + "Milling - Fine Milling": "fine_milling", + "Milling - Fine Milling - Electron Image": "fine_milling_electron", + "Milling - Finer Milling": "finer_milling", + "Milling - Finer Milling - Electron Image": "finer_milling_electron", + # Thinning stage + "Thinning - Delay": "delay_2", + "Thinning - Polishing 1": "polishing_1", + "Thinning - Polishing 1 - Electron Image": "polishing_1_electron", + "Thinning - Polishing 2": "polishing_2", + "Thinning - Polishing 2 - Ion Image": "polishing_2_ion", + "Thinning - Polishing 2 - Electron Image": "polishing_2_electron", +} + + +STAGE_POSITION_VALUES = { + # Map class attribute to element name + # Paths are relative to the "StagePosition" node + "x": "X", + "y": "Y", + "z": "Z", + "rotation": "R", + "tilt_alpha": "AT", +} + + +STAGE_POSITION_NAMES = { + # Map class attribute to element name + # Paths are relative to the "Site" node + "thinning": "ThinningSiteLocation/StagePosition/StagePosition", + # These stage position fields are also present + # but it's unclear which milling steps they correspond to + "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", + "thinning_stage": "Parameters/ThinningStagePosition/StagePosition", + "preparation": "PreparationSiteLocation/StagePosition/StagePosition", + "chunk_site": "ChunkSiteLocation/StagePosition/StagePosition", +} + + +BEAM_VALUES = { + # Map class attribute to element name + # These are relative to the "MillingPreset" or "BeamPreset" node + "beam_type": "BeamType", + "voltage": "HighVoltage", + "current": "BeamCurrent", +} + + +LAMELLA_MILLING_VALUES = { + # Map class atrribute to element name + # These are 
relative to the "Activity" node + "milling_angle": "MillingAngle", + "lamella_offset": "OffsetFromLamella", + "trench_height_front": "FrontTrenchHeight", + "trench_height_rear": "RearTrenchHeight", + "width_overlap_front_left": "LamellaFrontLeftWidthOverlap", + "width_overlap_front_right": "LamellaFrontRightWidthOverlap", + "width_overlap_rear_left": "LamellaRearLeftWidthOverlap", + "width_overlap_rear_right": "LamellaRearRightWidthOverlap", +} + + +@dataclass +class MillingImage: file: Path timestamp: float @@ -45,6 +226,81 @@ def _number_from_name(name: str) -> int: ) +T = TypeVar("T") + + +def _parse_xml_text( + node: ET.Element, + path: str, + func: Callable[[str], T], +) -> T | None: + """ + Searches the XML Element using the provided path. If a matching node is found, + and it has a text attribute, processes the text using the provided function. + Otherwise, returns None. + """ + if (match := node.find(path)) is None or (text := match.text) is None: + return None + try: + return func(text) + except (ValueError, TypeError): + logger.error(f"Error parsing XML text {text} at path {path}", exc_info=True) + return None + + +SI_UNITS_KEY = { + # Length + "mm": 1e-3, + "um": 1e-6, + "μm": 1e-6, + "nm": 1e-9, + # Current + "mA": 1e-3, + "uA": 1e-6, + "μA": 1e-6, + "nA": 1e-9, + "pA": 1e-12, + # Voltage + "kV": 1e3, + "mV": 1e-3, + # Time + "ms": 1e-3, + "us": 1e-6, + "μs": 1e-6, + # Miscallenous + "%": 0.01, +} + + +def _parse_measurement(text: str): + """ + The measurements in the ProjectData.dat file are stored in a human-readable format + as strings. This helper function converts them into their base SI unit and returns + the value as a float. + + E.g. 
5 um will be parsed as 0.000005 + """ + try: + value, unit = (s.strip() for s in text.split(" ", 1)) + return float(value) * SI_UNITS_KEY.get(unit, 1) + except ValueError: + logger.warning(f"Could not parse {value} as a measurement") + return None + + +def _parse_boolean(text: str): + """ + Parses the XML element's text field and returns it as a Python boolean + """ + if text.strip().lower() in ("true", "t", "1"): + return True + elif text.strip().lower() in ("false", "f", "0"): + return False + else: + logger.warning(f"Could not parse {text} as a boolean") + return None + + def _get_source(file_path: Path, environment: MurfeyInstanceEnvironment) -> Path | None: """ Returns the Path of the file on the client PC. @@ -84,8 +340,8 @@ def __init__( super().__init__("FIBContext", acquisition_software, token) self._basepath = basepath self._machine_config = machine_config - self._lamellae: dict[int, Lamella] = {} - self._milling_images: dict[int, list[MillingImage]] = {} + self._site_info: dict[int, LamellaSiteInfo] = {} + self._drift_correction_images: dict[int, list[MillingImage]] = {} def post_transfer( self, @@ -103,7 +359,25 @@ def post_transfer( # ----------------------------------------------------------------------------- if self._acquisition_software == "autotem": parts = transferred_file.parts - if "DCImages" in parts and transferred_file.suffix == ".png": + if transferred_file.name == "ProjectData.dat": + logger.info(f"Found metadata file {transferred_file} for parsing") + all_site_info_new = self._parse_autotem_metadata(transferred_file) + for site_num, site_info_new in all_site_info_new.items(): + # Post the data to the backend if it's been changed + if site_info_new.model_dump( + exclude_none=True + ) != self._site_info.get(site_num, LamellaSiteInfo()).model_dump( + exclude_none=True + ): + ############## + # Do POST here + ############## + + # Update existing dict + self._site_info[site_num] = site_info_new + logger.info(f"Updated metadata for site 
{site_num}") + + elif "DCImages" in parts and transferred_file.suffix == ".png": lamella_name = parts[parts.index("Sites") + 1] lamella_number = _number_from_name(lamella_name) time_from_name = transferred_file.name.split("-")[:6] @@ -117,11 +391,6 @@ def post_transfer( second=int(time_from_name[5]), ) ) - if not self._lamellae.get(lamella_number): - self._lamellae[lamella_number] = Lamella( - name=lamella_name, - number=lamella_number, - ) if not (source := _get_source(transferred_file, environment)): logger.warning(f"No source found for file {transferred_file}") return @@ -139,15 +408,15 @@ def post_transfer( f"File {transferred_file.name!r} not found on storage system" ) return - if not self._milling_images.get(lamella_number): - self._milling_images[lamella_number] = [ + if not self._drift_correction_images.get(lamella_number): + self._drift_correction_images[lamella_number] = [ MillingImage( timestamp=timestamp, file=destination_file, ) ] else: - self._milling_images[lamella_number].append( + self._drift_correction_images[lamella_number].append( MillingImage( timestamp=timestamp, file=destination_file, @@ -156,7 +425,8 @@ def post_transfer( gif_list = [ l.file for l in sorted( - self._milling_images[lamella_number], key=lambda x: x.timestamp + self._drift_correction_images[lamella_number], + key=lambda x: x.timestamp, ) ] raw_directory = Path( @@ -178,24 +448,7 @@ def post_transfer( "raw_directory": raw_directory, }, ) - elif transferred_file.name == "ProjectData.dat": - with open(transferred_file, "r") as dat: - try: - for_parsing = dat.read() - except Exception: - logger.warning(f"Failed to parse file {transferred_file}") - return - metadata = xmltodict.parse(for_parsing) - sites = metadata["AutoTEM"]["Project"]["Sites"]["Site"] - for site in sites: - number = _number_from_name(site["Name"]) - milling_angle = site["Workflow"]["Recipe"][0]["Activities"][ - "MillingAngleActivity" - ].get("MillingAngle") - if self._lamellae.get(number) and milling_angle: - 
self._lamellae[number]._replace( - angle=float(milling_angle.split(" ")[0]) - ) + # ----------------------------------------------------------------------------- # Maps # ----------------------------------------------------------------------------- @@ -233,6 +486,156 @@ def post_transfer( elif self._acquisition_software == "meteor": pass + def _parse_autotem_metadata(self, file: Path): + """ + Helper function to parse the 'ProjectData.dat' file produced by the AutoTEM. + This file contains metadata information on the milling sites set by the user, + along with the configured milling steps and their completion status. + """ + + try: + root = ET.parse(file).getroot() + except Exception: + logger.warning(f"Error parsing file {str(file)}", exc_info=True) + return None + + # Find all the Site nodes + if not (sites := root.findall(".//Sites/Site")): + logger.warning(f"No site information found in {str(file)}") + return None + + # Iterate through Site nodes + all_site_info: dict[int, LamellaSiteInfo] = {} + for site in sites: + # Extract site name and number + if (site_name := _parse_xml_text(site, "Name", str)) is None: + logger.warning("Current site doesn't have a name") + continue + site_num = _number_from_name(site_name) + site_info = LamellaSiteInfo(site_name=site_name) + + # Extract stage position information for all known stages in current site + stage_info = StagePositionInfo( + **{ + stage_name: StagePositionValues( + **{ + value_name: value + for value_name, value_path in STAGE_POSITION_VALUES.items() + if ( + value := _parse_xml_text( + stage, value_path, _parse_measurement + ) + ) + is not None + } + ) + for stage_name, stage_path in STAGE_POSITION_NAMES.items() + if (stage := site.find(stage_path)) is not None + } + ) + site_info.stage_info = stage_info + + # Find all Recipe nodes for the Site + if not (recipes := site.findall("Workflow/Recipe")): + # Early skip if no recipes are found + logger.warning(f"No recipes found for site {site_name}") + continue + + 
# Create dataclasses for each site + for recipe in recipes: + if (recipe_name := _parse_xml_text(recipe, "Name", str)) is None: + # Early skip if the Recipe has no Name + logger.warning("Recipe doesn't have a name, skipping") + continue + + # Find all the nodes under Activities + if (activities := recipe.find("Activities")) is None: + # Early skip if none exist + logger.warning(f"Recipe {recipe_name} doesn't have any activities") + continue + + # Iterate through the activities + for activity in activities: + if ( + activity_name := _parse_xml_text(activity, "Name", str) + ) is None: + # Early skip if activity has no name + logger.warning( + f"Activitiy in recipe {recipe_name} doesn't have a name, skipping" + ) + continue + + # Create a unique name based on recipe and activity names + unique_name = f"{recipe_name} - {activity_name}" + step_info = MillingStepInfo() + + # Update the corresponding milling activity field + step_info.enabled = _parse_xml_text( + activity, "IsEnabled", _parse_boolean + ) + step_info.status = _parse_xml_text( + activity, "ActivityMetadata/ExecutionResult", str + ) + step_info.execution_time = _parse_xml_text( + activity, "ExecutionTime", _parse_measurement + ) + + # Additional metadata extraction if elements are present + if activity.find("DepthCorrection") is not None: + step_info.depth_correction = _parse_xml_text( + activity, "DepthCorrection", float + ) + # Lamella milling geometries + for value_name, value_path in LAMELLA_MILLING_VALUES.items(): + step_info.__setattr__( + value_name, + _parse_xml_text( + activity, + value_path, + _parse_measurement, + ), + ) + # Beam information stored in either "BeamPreset" or "MillingPreset" + if activity.find("BeamPreset") is not None: + for value_name, value_path in BEAM_VALUES.items(): + match value_name: + case "beam_type": + step_info.beam_type = _parse_xml_text( + activity, f"BeamPreset/{value_path}", str + ) + case _: + step_info.__setattr__( + value_name, + _parse_xml_text( + activity, + 
f"BeamPreset/{value_path}", + _parse_measurement, + ), + ) + elif activity.find("MillingPreset") is not None: + for value_name, value_path in BEAM_VALUES.items(): + match value_name: + case "beam_type": + step_info.beam_type = _parse_xml_text( + activity, f"MillingPreset/{value_path}", str + ) + case _: + step_info.__setattr__( + value_name, + _parse_xml_text( + activity, + f"MillingPreset/{value_path}", + _parse_measurement, + ), + ) + # Add info for current step to the site info model + site_info.__setattr__(MILLING_STEP_NAMES[unique_name], step_info) + # Add info for current site to the dict + all_site_info[site_num] = site_info + + logger.info(f"Successfully extracted AutoTEM metadata from file {file}") + return all_site_info + def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment): """ Constructs the URL and dictionary to be posted to the server, which then triggers From fdd145e637d87812974363602eeaa7c11087d9af Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Thu, 16 Apr 2026 18:49:45 +0100 Subject: [PATCH 03/19] Fixed broken tests --- tests/client/contexts/test_fib.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index b9344051..bbf35faa 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -173,8 +173,7 @@ def test_fib_autotem_context( rsync_basepath=Path(""), ) assert mock_capture_post.call_count == len(fib_autotem_dc_images) - assert len(context._milling) == num_lamellae - assert len(context._lamellae) == num_lamellae + assert len(context._drift_correction_images) == num_lamellae def test_fib_maps_context( From aee936200c29ff7e307b61dcf7ffaaef5ea7ab86 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 11:44:33 +0100 Subject: [PATCH 04/19] Moved FIB Pydantic models into 'murfey.util.models', as they will eventually be used by the backend as well --- src/murfey/client/contexts/fib.py | 114 
++---------------------------- src/murfey/util/models.py | 112 +++++++++++++++++++++++++++-- 2 files changed, 115 insertions(+), 111 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 57970492..095648d5 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -9,121 +9,21 @@ from pathlib import Path from typing import Callable, TypeVar -from pydantic import BaseModel, computed_field - from murfey.client.context import Context from murfey.client.instance_environment import MurfeyInstanceEnvironment from murfey.util.client import capture_post +from murfey.util.models import ( + LamellaSiteInfo, + MillingStepInfo, + StagePositionInfo, + StagePositionValues, +) logger = logging.getLogger("murfey.client.contexts.fib") lock = threading.Lock() -class StagePositionValues(BaseModel): - # Coordinates are in metres - x: float | None = None - y: float | None = None - z: float | None = None - # Angles are in degrees - rotation: float | None = None - tilt_alpha: float | None = None - - @computed_field - def slot_number(self) -> int | None: - if self.x is None: - return None - return 1 if self.x < 0 else 2 - - -class StagePositionInfo(BaseModel): - thinning: StagePositionValues | None = None - chunk_coincidence: StagePositionValues | None = None - thinning_stage: StagePositionValues | None = None - preparation: StagePositionValues | None = None - chunk_site: StagePositionValues | None = None - - -class WidthOverlap(BaseModel): - front_left: float | None = None - front_right: float | None = None - rear_left: float | None = None - rear_right: float | None = None - - -class MillingStepInfo(BaseModel): - """ - These are the parameters configured per milling step that we are interested - in tracking. Some attributes are present only for certain steps. 
- """ - - # Step setup - enabled: bool | None = None - status: str | None = None - execution_time: float | None = None - - # Beam info - beam_type: str | None = None - voltage: float | None = None - current: float | None = None - - # Lamella dimensions - - # Milling info - milling_angle: float | None = None - depth_correction: float | None = None - lamella_offset: float | None = None - trench_height_front: float | None = None - trench_height_rear: float | None = None - width_overlap_front_left: float | None = None - width_overlap_front_right: float | None = None - width_overlap_rear_left: float | None = None - width_overlap_rear_right: float | None = None - - -class LamellaSiteInfo(BaseModel): - """ - These parameters are not associated with a single milling step - """ - - site_name: str | None = None - stage_info: StagePositionInfo | None = None - - """ - These are all the possible processing steps found in the ProjectData.dat file. - It contains information on whether the step has been enabled as well as its - current staate. 
- """ - # Preparation stage - eucentric_tilt: MillingStepInfo | None = None - artificial_features: MillingStepInfo | None = None - milling_angle: MillingStepInfo | None = None - image_acquisition: MillingStepInfo | None = None - lamella_placement: MillingStepInfo | None = None - # Milling stage - delay_1: MillingStepInfo | None = None - reference_definition_1: MillingStepInfo | None = None - reference_definition_1_electron: MillingStepInfo | None = None - stress_relief_cuts: MillingStepInfo | None = None - reference_definition_2: MillingStepInfo | None = None - rough_milling: MillingStepInfo | None = None - rough_milling_electron: MillingStepInfo | None = None - reference_definition_3: MillingStepInfo | None = None - medium_milling: MillingStepInfo | None = None - medium_milling_electron: MillingStepInfo | None = None - fine_milling: MillingStepInfo | None = None - fine_milling_electron: MillingStepInfo | None = None - finer_milling: MillingStepInfo | None = None - finer_milling_electron: MillingStepInfo | None = None - # Thinning stage - delay_2: MillingStepInfo | None = None - polishing_1: MillingStepInfo | None = None - polishing_1_electron: MillingStepInfo | None = None - polishing_2: MillingStepInfo | None = None - polishing_2_ion: MillingStepInfo | None = None - polishing_2_electron: MillingStepInfo | None = None - - MILLING_STEP_NAMES = { # Map unique activity name to class attribute # Preparation stage @@ -570,7 +470,7 @@ def _parse_autotem_metadata(self, file: Path): step_info = MillingStepInfo() # Update the corresponding milling activity field - step_info.enabled = _parse_xml_text( + step_info.is_enabled = _parse_xml_text( activity, "IsEnabled", _parse_boolean ) step_info.status = _parse_xml_text( diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index 7f92a743..01511e7c 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -5,11 +5,12 @@ from pathlib import Path from typing import Any, Dict, List, Optional -from 
pydantic import BaseModel, field_validator +from pydantic import BaseModel, computed_field, field_validator """ +======================================================================================= General Models -============== +======================================================================================= Models used in multiple workflows. """ @@ -91,8 +92,110 @@ class UpstreamFileRequestInfo(BaseModel): """ +======================================================================================= +FIB +======================================================================================= +Models related to the FIB workflow. +""" + + +class StagePositionValues(BaseModel): + # Coordinates are in metres + x: float | None = None + y: float | None = None + z: float | None = None + # Angles are in degrees + rotation: float | None = None + tilt_alpha: float | None = None + + @computed_field + def slot_number(self) -> int | None: + if self.x is None: + return None + return 1 if self.x < 0 else 2 + + +class StagePositionInfo(BaseModel): + thinning: StagePositionValues | None = None + chunk_coincidence: StagePositionValues | None = None + thinning_stage: StagePositionValues | None = None + preparation: StagePositionValues | None = None + chunk_site: StagePositionValues | None = None + + +class MillingStepInfo(BaseModel): + """ + These are the parameters configured per milling step that we are interested + in tracking. Some attributes are present only for certain steps. 
+ """ + + # Step setup + is_enabled: bool | None = None + status: str | None = None + execution_time: float | None = None + + # Beam info + beam_type: str | None = None + voltage: float | None = None + current: float | None = None + + # Milling info + milling_angle: float | None = None + depth_correction: float | None = None + lamella_offset: float | None = None + trench_height_front: float | None = None + trench_height_rear: float | None = None + width_overlap_front_left: float | None = None + width_overlap_front_right: float | None = None + width_overlap_rear_left: float | None = None + width_overlap_rear_right: float | None = None + + +class LamellaSiteInfo(BaseModel): + """ + Pydantic model that stores all the metadata of interest for a single lamella + site. + """ + + # Values not associated with a single step + site_name: str | None = None + stage_info: StagePositionInfo | None = None + + # Processing steps supported by AutoTEM + # Preparation stage + eucentric_tilt: MillingStepInfo | None = None + artificial_features: MillingStepInfo | None = None + milling_angle: MillingStepInfo | None = None + image_acquisition: MillingStepInfo | None = None + lamella_placement: MillingStepInfo | None = None + # Milling stage + delay_1: MillingStepInfo | None = None + reference_definition_1: MillingStepInfo | None = None + reference_definition_1_electron: MillingStepInfo | None = None + stress_relief_cuts: MillingStepInfo | None = None + reference_definition_2: MillingStepInfo | None = None + rough_milling: MillingStepInfo | None = None + rough_milling_electron: MillingStepInfo | None = None + reference_definition_3: MillingStepInfo | None = None + medium_milling: MillingStepInfo | None = None + medium_milling_electron: MillingStepInfo | None = None + fine_milling: MillingStepInfo | None = None + fine_milling_electron: MillingStepInfo | None = None + finer_milling: MillingStepInfo | None = None + finer_milling_electron: MillingStepInfo | None = None + # Thinning stage + 
delay_2: MillingStepInfo | None = None + polishing_1: MillingStepInfo | None = None + polishing_1_electron: MillingStepInfo | None = None + polishing_2: MillingStepInfo | None = None + polishing_2_ion: MillingStepInfo | None = None + polishing_2_electron: MillingStepInfo | None = None + + +""" +======================================================================================= Single Particle Analysis -======================== +======================================================================================= Models related to the single-particle analysis workflow. """ @@ -218,8 +321,9 @@ class Token(BaseModel): """ +======================================================================================= Tomography -========== +======================================================================================= Models related to the tomographic reconstruction workflow. """ From b8675c4b3b75c7871fb6a291c3e1af356f6c268e Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 11:56:37 +0100 Subject: [PATCH 05/19] Added placeholder endpoint for use with registering milling progress --- src/murfey/server/api/workflow_fib.py | 14 ++++++++++++++ src/murfey/util/route_manifest.yaml | 7 +++++++ 2 files changed, 21 insertions(+) diff --git a/src/murfey/server/api/workflow_fib.py b/src/murfey/server/api/workflow_fib.py index 5e21f497..44406757 100644 --- a/src/murfey/server/api/workflow_fib.py +++ b/src/murfey/server/api/workflow_fib.py @@ -1,3 +1,4 @@ +import json import logging from importlib.metadata import entry_points from pathlib import Path @@ -8,6 +9,7 @@ from murfey.server.api.auth import validate_instrument_token from murfey.server.murfey_db import murfey_db +from murfey.util.models import LamellaSiteInfo logger = logging.getLogger("murfey.server.api.workflow_fib") @@ -43,3 +45,15 @@ def register_fib_atlas( file=fib_atlas_info.file, murfey_db=db, ) + + +@router.post("/sessions/{session_id}/register_milling_progress") +def 
register_fib_milling_progress( + session_id: int, + site_info: LamellaSiteInfo, + db: Session = murfey_db, +): + logger.debug( + "Received the following FIB metadata for registration:\n" + f"{json.dumps(site_info.model_dump(exclude_none=True), indent=2, default=str)}" + ) diff --git a/src/murfey/util/route_manifest.yaml b/src/murfey/util/route_manifest.yaml index 787760f2..dcbc9ece 100644 --- a/src/murfey/util/route_manifest.yaml +++ b/src/murfey/util/route_manifest.yaml @@ -1440,3 +1440,10 @@ murfey.server.api.workflow_fib.router: type: int methods: - POST + - path: /workflow/fib/sessions/{session_id}/register_milling_progress + function: register_fib_milling_progress + path_params: + - name: session_id + type: int + methods: + - POST From 8d3fbdf78c500afb292cbdef89509cccf3ec53a2 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 12:02:34 +0100 Subject: [PATCH 06/19] Added logic to FIBContext to post milling progress to backend --- src/murfey/client/contexts/fib.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 095648d5..84edae4a 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -264,14 +264,22 @@ def post_transfer( all_site_info_new = self._parse_autotem_metadata(transferred_file) for site_num, site_info_new in all_site_info_new.items(): # Post the data to the backend if it's been changed - if site_info_new.model_dump( - exclude_none=True + if ( + data := site_info_new.model_dump(exclude_none=True) ) != self._site_info.get(site_num, LamellaSiteInfo()).model_dump( exclude_none=True ): - ############## - # Do POST here - ############## + # Post to the backend + capture_post( + base_url=str(environment.url.geturl()), + router_name="workflow_fib.router", + function_name="register_fib_milling_progress", + token=self._token, + instrument_name=environment.instrument_name, + data=data, + # 
Endpoint kwargs + session_id=environment.murfey_session, + ) # Update existing dict self._site_info[site_num] = site_info_new @@ -339,14 +347,15 @@ def post_transfer( function_name="make_gif", token=self._token, instrument_name=environment.instrument_name, - year=datetime.now().year, - visit_name=environment.visit, - session_id=environment.murfey_session, data={ "lamella_number": lamella_number, "images": [str(file) for file in gif_list], "raw_directory": raw_directory, }, + # Endpoint kwargs + year=datetime.now().year, + visit_name=environment.visit, + session_id=environment.murfey_session, ) # ----------------------------------------------------------------------------- @@ -550,6 +559,7 @@ def _register_atlas(self, file: Path, environment: MurfeyInstanceEnvironment): token=self._token, instrument_name=environment.instrument_name, data={"file": str(file)}, + # Endpoint kwargs session_id=environment.murfey_session, ) logger.info(f"Registering atlas image {file.name!r}") From 9e94cd24050a9d753beba824cd59931fe75d19b2 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 12:32:55 +0100 Subject: [PATCH 07/19] Add logic to create backup of the 'ProjectData.dat' file whenever it passes through the FIBContext --- src/murfey/client/contexts/fib.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 84edae4a..1a712b06 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -2,7 +2,9 @@ import logging import re +import shutil import threading +import time import xml.etree.ElementTree as ET from dataclasses import dataclass from datetime import datetime @@ -261,6 +263,19 @@ def post_transfer( parts = transferred_file.parts if transferred_file.name == "ProjectData.dat": logger.info(f"Found metadata file {transferred_file} for parsing") + + # Create a backup copy of the file + backup_file = ( + transferred_file.parent + / 
f"{transferred_file.stem}-{time.time_ns()}{transferred_file.suffix}" + ) + shutil.copyfile( + transferred_file, + backup_file, + ) + logger.info(f"Saved snapshot of the metadata file as {backup_file}") + + # Parse the metadata file all_site_info_new = self._parse_autotem_metadata(transferred_file) for site_num, site_info_new in all_site_info_new.items(): # Post the data to the backend if it's been changed @@ -269,7 +284,6 @@ def post_transfer( ) != self._site_info.get(site_num, LamellaSiteInfo()).model_dump( exclude_none=True ): - # Post to the backend capture_post( base_url=str(environment.url.geturl()), router_name="workflow_fib.router", @@ -283,7 +297,7 @@ def post_transfer( # Update existing dict self._site_info[site_num] = site_info_new - logger.info(f"Updated metadata for site {site_num}") + logger.info(f"Updating metadata for site {site_num}") elif "DCImages" in parts and transferred_file.suffix == ".png": lamella_name = parts[parts.index("Sites") + 1] From 678a2a3ea11917d6398c42f20b1d47c149b05289 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 15:15:20 +0100 Subject: [PATCH 08/19] Updated some field names, added some fields, and added docstrings explaining the contents of 'StagePositionInfo' --- src/murfey/util/models.py | 37 ++++++++++++++++++++++++++++++++----- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index 01511e7c..7bf7d2d8 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -116,11 +116,33 @@ def slot_number(self) -> int | None: class StagePositionInfo(BaseModel): - thinning: StagePositionValues | None = None - chunk_coincidence: StagePositionValues | None = None - thinning_stage: StagePositionValues | None = None - preparation: StagePositionValues | None = None - chunk_site: StagePositionValues | None = None + """ + Stage position values associated with the different stages of the milling + process. 
The XML paths they're associated with (with "Site" as the parent
+    node) are indicated in the comments.
+
+    The image acquisition steps have a "SiteLocationType" field that appears to
+    be associated with either "ChunkSiteLocation" or "ThinningSiteLocation".
+    "ThinningStagePosition" appears to be a duplicate of "ThinningSiteLocation"
+    so far, and it is unclear for now what stages "PreparationSiteLocation"
+    and "ChunkCoincidenceStagePosition" currently correspond to.
+    """
+
+    preparation: StagePositionValues | None = (
+        None  # PreparationSiteLocation/StagePosition/StagePosition
+    )
+    chunk_coincidence: StagePositionValues | None = (
+        None  # Parameters/ChunkCoincidenceStagePosition/StagePosition
+    )
+    chunk: StagePositionValues | None = (
+        None  # ChunkSiteLocation/StagePosition/StagePosition
+    )
+    thinning_1: StagePositionValues | None = (
+        None  # Parameters/ThinningStagePosition/StagePosition
+    )
+    thinning_2: StagePositionValues | None = (
+        None  # ThinningSiteLocation/StagePosition/StagePosition
+    )
 
 
 class MillingStepInfo(BaseModel):
@@ -130,10 +152,15 @@ class MillingStepInfo(BaseModel):
     """
 
     # Step setup
+    step_name: str | None = None
+    recipe_name: str | None = None
     is_enabled: bool | None = None
     status: str | None = None
     execution_time: float | None = None
 
+    # Associated stage position information
+    site_location_type: str | None = None
+
     # Beam info
     beam_type: str | None = None
     voltage: float | None = None

From fcfc5c6dc38c7cdb779b4ef5db21ddfd9c686450 Mon Sep 17 00:00:00 2001
From: Eu Pin Tien
Date: Fri, 17 Apr 2026 15:16:52 +0100
Subject: [PATCH 09/19] Extract more fields from the FIB AutoTEM metadata

---
 src/murfey/client/contexts/fib.py | 24 +++++++++++++-----------
 1 file changed, 13 insertions(+), 11 deletions(-)

diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py
index 1a712b06..9a0b9304 100644
--- a/src/murfey/client/contexts/fib.py
+++ b/src/murfey/client/contexts/fib.py
@@ -73,13 +73,11 @@
 STAGE_POSITION_NAMES = {
# Map class attribute to element name # Paths are relative to the "Site" node - "thinning": "ThinningSiteLocation/StagePosition/StagePosition", - # These stage position fields are also present - # but it's unclear which milling steps they correspond to - "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", - "thinning_stage": "Parameters/ThinningStagePosition/StagePosition", "preparation": "PreparationSiteLocation/StagePosition/StagePosition", - "chunk_site": "ChunkSiteLocation/StagePosition/StagePosition", + "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", + "chunk": "ChunkSiteLocation/StagePosition/StagePosition", + "thinning_1": "Parameters/ThinningStagePosition/StagePosition", + "thinning_2": "ThinningSiteLocation/StagePosition/StagePosition", } @@ -490,7 +488,9 @@ def _parse_autotem_metadata(self, file: Path): # Create a unique name based on recipe and activity names unique_name = f"{recipe_name} - {activity_name}" - step_info = MillingStepInfo() + step_info = MillingStepInfo( + step_name=activity_name, recipe_name=recipe_name + ) # Update the corresponding milling activity field step_info.is_enabled = _parse_xml_text( @@ -504,10 +504,12 @@ def _parse_autotem_metadata(self, file: Path): ) # Additional metadata extraction if elements are present - if activity.find("DepthCorrection") is not None: - step_info.depth_correction = _parse_xml_text( - activity, "DepthCorrection", float - ) + step_info.site_location_type = _parse_xml_text( + activity, "SiteLocationType", str + ) + step_info.depth_correction = _parse_xml_text( + activity, "DepthCorrection", float + ) # Lamella milling geometries for value_name, value_path in LAMELLA_MILLING_VALUES.items(): step_info.__setattr__( From fa1d7396684007daf0c47bb59f71ae2c44b66ae1 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 15:20:10 +0100 Subject: [PATCH 10/19] Updated test logic for FIB AutoTEM context to parametrise it --- 
tests/client/contexts/test_fib.py | 70 +++++++++++++++++++++++-------- 1 file changed, 53 insertions(+), 17 deletions(-) diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index bbf35faa..88538b6d 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -129,14 +129,32 @@ def test_file_transferred_to( ) == destination_dir / file.relative_to(visit_dir) +@pytest.mark.parametrize( + "test_params", + ( # File type to test | Use environment? | Find source? | Find destination? + ("drift_correction", True, True, True), + ("drift_correction", False, True, True), + ("drift_correction", True, False, True), + ("drift_correction", True, True, False), + ), +) def test_fib_autotem_context( mocker: MockerFixture, + test_params: tuple[str, bool, bool, bool], tmp_path: Path, visit_dir: Path, fib_autotem_dc_images: list[Path], ): + # Unpack test params + file_type, use_env, find_source, find_dst = test_params + # Mock the environment - mock_environment = MagicMock() + mock_environment = None + if use_env: + mock_environment = MagicMock() + + # Mock the logger to check if specific logs are triggered + mock_logger = mocker.patch("murfey.client.contexts.fib.logger") # Create a list of destinations destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" @@ -145,11 +163,9 @@ def test_fib_autotem_context( ] # Mock the functions used in 'post_transfer' - mock_get_source = mocker.patch( - "murfey.client.contexts.fib._get_source", return_value=tmp_path - ) + mock_get_source = mocker.patch("murfey.client.contexts.fib._get_source") mock_file_transferred_to = mocker.patch( - "murfey.client.contexts.fib._file_transferred_to", side_effect=destination_files + "murfey.client.contexts.fib._file_transferred_to" ) mock_capture_post = mocker.patch("murfey.client.contexts.fib.capture_post") @@ -162,18 +178,38 @@ def test_fib_autotem_context( token="", ) - # Parse images one-by-one and check that expected calls were made - 
for file in fib_autotem_dc_images: - context.post_transfer(file, environment=mock_environment) - mock_get_source.assert_called_with(file, mock_environment) - mock_file_transferred_to.assert_called_with( - environment=mock_environment, - source=basepath, - file_path=file, - rsync_basepath=Path(""), - ) - assert mock_capture_post.call_count == len(fib_autotem_dc_images) - assert len(context._drift_correction_images) == num_lamellae + match file_type: + case "drift_correction": + # Add case-specific return values and side-effects to the mocks + mock_get_source.return_value = tmp_path if find_source else None + if find_dst: + mock_file_transferred_to.side_effect = destination_files + else: + mock_file_transferred_to.return_value = None + + # Parse images one-by-one and check that expected calls were made + for file in fib_autotem_dc_images: + context.post_transfer(file, environment=mock_environment) + if not use_env: + mock_logger.warning.assert_called_with("No environment passed in") + elif not find_source: + mock_logger.warning.assert_called_with( + f"No source found for file {file}" + ) + elif not find_dst: + mock_logger.warning.assert_called_with( + f"File {file.name!r} not found on storage system" + ) + else: + mock_get_source.assert_called_with(file, mock_environment) + mock_file_transferred_to.assert_called_with( + environment=mock_environment, + source=basepath, + file_path=file, + rsync_basepath=Path(""), + ) + assert mock_capture_post.call_count == len(fib_autotem_dc_images) + assert len(context._drift_correction_images) == num_lamellae def test_fib_maps_context( From 250f12f4dcfac95137051c76658a4317e9d491ed Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 16:00:52 +0100 Subject: [PATCH 11/19] Nest information on milling steps under the 'steps' field in 'LamellaSiteInfo'; store project name and site number as well --- src/murfey/util/models.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git 
a/src/murfey/util/models.py b/src/murfey/util/models.py index 7bf7d2d8..d438b5f3 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -178,16 +178,7 @@ class MillingStepInfo(BaseModel): width_overlap_rear_right: float | None = None -class LamellaSiteInfo(BaseModel): - """ - Pydantic model that stores all the metadata of interest for a single lamella - site. - """ - - # Values not associated with a single step - site_name: str | None = None - stage_info: StagePositionInfo | None = None - +class MillingSteps(BaseModel): # Processing steps supported by AutoTEM # Preparation stage eucentric_tilt: MillingStepInfo | None = None @@ -219,6 +210,20 @@ class LamellaSiteInfo(BaseModel): polishing_2_electron: MillingStepInfo | None = None +class LamellaSiteInfo(BaseModel): + """ + Pydantic model that stores all the metadata of interest for a single lamella + site. + """ + + # Values not associated with a single step + project_name: str | None = None + site_name: str | None = None + site_number: int | None = None + stage_info: StagePositionInfo | None = None + steps: MillingSteps | None = None + + """ ======================================================================================= Single Particle Analysis From eb33da92b650f685494b9a901bd9661db8abf11d Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 16:03:11 +0100 Subject: [PATCH 12/19] Add logic to extract project name; update metadata insertion logic into the 'LamellaSiteInfo' model --- src/murfey/client/contexts/fib.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 9a0b9304..4873a729 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -17,6 +17,7 @@ from murfey.util.models import ( LamellaSiteInfo, MillingStepInfo, + MillingSteps, StagePositionInfo, StagePositionValues, ) @@ -420,6 +421,11 @@ def _parse_autotem_metadata(self, file: 
Path): logger.warning(f"Error parsing file {str(file)}", exc_info=True) return None + # Get the project name + if (project_name := _parse_xml_text(root, ".//Project/Name", str)) is None: + logger.warning("Metadata file has no project name") + return None + # Find all the Site nodes if not (sites := root.findall(".//Sites/Site")): logger.warning(f"No site information found in {str(file)}") @@ -433,7 +439,12 @@ def _parse_autotem_metadata(self, file: Path): logger.warning("Current site doesn't have a name") continue site_num = _number_from_name(site_name) - site_info = LamellaSiteInfo(site_name=site_name) + site_info = LamellaSiteInfo( + project_name=project_name, + site_name=site_name, + site_number=site_num, + steps=MillingSteps(), + ) # Extract stage position information for all known stages in current site stage_info = StagePositionInfo( @@ -554,7 +565,10 @@ def _parse_autotem_metadata(self, file: Path): ), ) # Add info for current step to the site info model - site_info.__setattr__(MILLING_STEP_NAMES[unique_name], step_info) + site_info.steps.__setattr__( + MILLING_STEP_NAMES[unique_name], step_info + ) + # Add info for current site to the dict all_site_info[site_num] = site_info From 0bc637702ae4bd5920a1c501518d7e0d9ca641d9 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 18:16:32 +0100 Subject: [PATCH 13/19] Use names that more closely match the names in the metadata --- src/murfey/util/models.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/murfey/util/models.py b/src/murfey/util/models.py index d438b5f3..86212f7f 100644 --- a/src/murfey/util/models.py +++ b/src/murfey/util/models.py @@ -188,13 +188,13 @@ class MillingSteps(BaseModel): lamella_placement: MillingStepInfo | None = None # Milling stage delay_1: MillingStepInfo | None = None - reference_definition_1: MillingStepInfo | None = None - reference_definition_1_electron: MillingStepInfo | None = None + reference_definition: MillingStepInfo | None = None + 
reference_definition_electron: MillingStepInfo | None = None stress_relief_cuts: MillingStepInfo | None = None - reference_definition_2: MillingStepInfo | None = None + reference_redefinition_1: MillingStepInfo | None = None rough_milling: MillingStepInfo | None = None rough_milling_electron: MillingStepInfo | None = None - reference_definition_3: MillingStepInfo | None = None + reference_redefinition_2: MillingStepInfo | None = None medium_milling: MillingStepInfo | None = None medium_milling_electron: MillingStepInfo | None = None fine_milling: MillingStepInfo | None = None From d8a4e1f15db89de9791dda3702e3edffa5ea197c Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 18:18:41 +0100 Subject: [PATCH 14/19] Streamlined metadata extraction of Activity values by iterating through a tuple of the model field name, XML path, and the function to apply --- src/murfey/client/contexts/fib.py | 229 ++++++++++++------------------ 1 file changed, 89 insertions(+), 140 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 4873a729..97ae0c2d 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -9,7 +9,7 @@ from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import Callable, TypeVar +from typing import Callable, Type, TypeVar from murfey.client.context import Context from murfey.client.instance_environment import MurfeyInstanceEnvironment @@ -27,84 +27,6 @@ lock = threading.Lock() -MILLING_STEP_NAMES = { - # Map unique activity name to class attribute - # Preparation stage - "Preparation - Eucentric Tilt": "eucentric_tilt", - "Preparation - Artificial Features": "artificial_features", - "Preparation - Milling Angle": "milling_angle", - "Preparation - Image Acquisition": "image_acquisition", - "Preparation - Lamella Placement": "lamella_placement", - # Milling stage - "Milling - Delay": "delay_1", - "Milling - Reference Definition": 
"reference_definition_1", - "Milling - Electron Reference Definition": "reference_definition_1_electron", - "Milling - Stress Relief Cuts": "stress_relief_cuts", - "Milling - Reference Redefinition 1": "reference_definition_2", - "Milling - Rough Milling": "rough_milling", - "Milling - Rough Milling - Electron Image": "rough_milling_electron", - "Milling - Reference Redefinition 2": "reference_definition_3", - "Milling - Medium Milling": "medium_milling", - "Milling - Medium Milling - Electron Image": "medium_milling_electron", - "Milling - Fine Milling": "fine_milling", - "Milling - Fine Milling - Electron Image": "fine_milling_electron", - "Milling - Finer Milling": "finer_milling", - "Milling - Finer Milling - Electron Image": "finer_milling_electron", - # Thinning stage - "Thinning - Delay": "delay_2", - "Thinning - Polishing 1": "polishing_1", - "Thinning - Polishing 1 - Electron Image": "polishing_1_electron", - "Thinning - Polishing 2": "polishing_2", - "Thinning - Polishing 2 - Ion Image": "polishing_2_ion", - "Thinning - Polishing 2 - Electron Image": "polishing_2_electron", -} - - -STAGE_POSITION_VALUES = { - # Map class attribute to element name - # Paths are relative to the "StagePosition" node - "x": "X", - "y": "Y", - "z": "Z", - "rotation": "R", - "tilt_alpha": "AT", -} - - -STAGE_POSITION_NAMES = { - # Map class attribute to element name - # Paths are relative to the "Site" node - "preparation": "PreparationSiteLocation/StagePosition/StagePosition", - "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", - "chunk": "ChunkSiteLocation/StagePosition/StagePosition", - "thinning_1": "Parameters/ThinningStagePosition/StagePosition", - "thinning_2": "ThinningSiteLocation/StagePosition/StagePosition", -} - - -BEAM_VALUES = { - # Map class attribute to element name - # These are relative to the "MillingPreset" or "BeamPreset" node - "beam_type": "BeamType", - "voltage": "HighVoltage", - "current": "BeamCurrent", -} - - 
-LAMELLA_MILLING_VALUES = { - # Map class atrribute to element name - # These are relative to the "Activity" node - "milling_angle": "MillingAngle", - "lamella_offset": "OffsetFromLamella", - "trench_height_front": "FrontTrenchHeight", - "trench_height_rear": "RearTrenchHeight", - "width_overlap_front_left": "LamellaFrontLeftWidthOverlap", - "width_overlap_front_right": "LamellaFrontRightWidthOverlap", - "width_overlap_rear_left": "LamellaRearLeftWidthOverlap", - "width_overlap_rear_right": "LamellaRearRightWidthOverlap", -} - - @dataclass class MillingImage: file: Path @@ -133,7 +55,7 @@ def _number_from_name(name: str) -> int: def _parse_xml_text( node: ET.Element, path: str, - func: Callable[[str], T], + func: Callable[[str], T] | Type, ) -> T | None: """ Searches the XML Element using the provided path. If a matching node is found, @@ -202,6 +124,89 @@ def _parse_boolean(text: str): return None +MILLING_STEP_NAMES = { + # Map unique activity name to class attribute + # Preparation stage + "Preparation - Eucentric Tilt": "eucentric_tilt", + "Preparation - Artificial Features": "artificial_features", + "Preparation - Milling Angle": "milling_angle", + "Preparation - Image Acquisition": "image_acquisition", + "Preparation - Lamella Placement": "lamella_placement", + # Milling stage + "Milling - Delay": "delay_1", + "Milling - Reference Definition": "reference_definition", + "Milling - Electron Reference Definition": "reference_definition_electron", + "Milling - Stress Relief Cuts": "stress_relief_cuts", + "Milling - Reference Redefinition 1": "reference_redefinition_1", + "Milling - Rough Milling": "rough_milling", + "Milling - Rough Milling - Electron Image": "rough_milling_electron", + "Milling - Reference Redefinition 2": "reference_redefinition_2", + "Milling - Medium Milling": "medium_milling", + "Milling - Medium Milling - Electron Image": "medium_milling_electron", + "Milling - Fine Milling": "fine_milling", + "Milling - Fine Milling - Electron Image": 
"fine_milling_electron", + "Milling - Finer Milling": "finer_milling", + "Milling - Finer Milling - Electron Image": "finer_milling_electron", + # Thinning stage + "Thinning - Delay": "delay_2", + "Thinning - Polishing 1": "polishing_1", + "Thinning - Polishing 1 - Electron Image": "polishing_1_electron", + "Thinning - Polishing 2": "polishing_2", + "Thinning - Polishing 2 - Ion Image": "polishing_2_ion", + "Thinning - Polishing 2 - Electron Image": "polishing_2_electron", +} + + +STAGE_POSITION_VALUES = { + # Map class attribute to element name + # Paths are relative to the "StagePosition" node + "x": "X", + "y": "Y", + "z": "Z", + "rotation": "R", + "tilt_alpha": "AT", +} + + +STAGE_POSITION_NAMES = { + # Map class attribute to element name + # Paths are relative to the "Site" node + "preparation": "PreparationSiteLocation/StagePosition/StagePosition", + "chunk_coincidence": "Parameters/ChunkCoincidenceStagePosition/StagePosition", + "chunk": "ChunkSiteLocation/StagePosition/StagePosition", + "thinning_1": "Parameters/ThinningStagePosition/StagePosition", + "thinning_2": "ThinningSiteLocation/StagePosition/StagePosition", +} + + +ACTIVITY_FIELD_MAP = ( + # Model field name | Path relative to "Activity" | Function to apply + # These are relative to the "Activity" node + # Common parameters + ("is_enabled", "IsEnabled", _parse_boolean), + ("status", "ActivityMetadata/ExecutionResult", str), + ("execution_time", "ExecutionTime", _parse_measurement), + # Milling/Imaging beam parameters + ("site_location_type", "SiteLocationType", str), + ("beam_type", "MillingPreset/BeamType", str), + ("beam_type", "BeamPreset/BeamType", str), + ("voltage", "MillingPreset/HighVoltage", _parse_measurement), + ("voltage", "BeamPreset/HighVoltage", _parse_measurement), + ("current", "MillingPreset/BeamCurrent", _parse_measurement), + ("current", "BeamPreset/BeamCurrent", _parse_measurement), + # Milling parameters + ("depth_correction", "DepthCorrection", float), + ("milling_angle", 
"MillingAngle", _parse_measurement), + ("lamella_offset", "OffsetFromLamella", _parse_measurement), + ("trench_height_front", "FrontTrenchHeight", _parse_measurement), + ("trench_height_rear", "RearTrenchHeight", _parse_measurement), + ("width_overlap_front_left", "LamellaFrontLeftWidthOverlap", _parse_measurement), + ("width_overlap_front_right", "LamellaFrontRightWidthOverlap", _parse_measurement), + ("width_overlap_rear_left", "LamellaRearLeftWidthOverlap", _parse_measurement), + ("width_overlap_rear_right", "LamellaRearRightWidthOverlap", _parse_measurement), +) + + def _get_source(file_path: Path, environment: MurfeyInstanceEnvironment) -> Path | None: """ Returns the Path of the file on the client PC. @@ -503,67 +508,11 @@ def _parse_autotem_metadata(self, file: Path): step_name=activity_name, recipe_name=recipe_name ) - # Update the corresponding milling activity field - step_info.is_enabled = _parse_xml_text( - activity, "IsEnabled", _parse_boolean - ) - step_info.status = _parse_xml_text( - activity, "ActivityMetadata/ExecutionResult", str - ) - step_info.execution_time = _parse_xml_text( - activity, "ExecutionTime", _parse_measurement - ) + # Iteratively update fields in the MillingSteps model it's not None + for field, path, func in ACTIVITY_FIELD_MAP: + if (value := _parse_xml_text(activity, path, func)) is not None: + step_info.__setattr__(field, value) - # Additional metadata extraction if elements are present - step_info.site_location_type = _parse_xml_text( - activity, "SiteLocationType", str - ) - step_info.depth_correction = _parse_xml_text( - activity, "DepthCorrection", float - ) - # Lamella milling geometries - for value_name, value_path in LAMELLA_MILLING_VALUES.items(): - step_info.__setattr__( - value_name, - _parse_xml_text( - activity, - value_path, - _parse_measurement, - ), - ) - # Beam information stored in either "BeamPreset" or "MillingPreset" - if activity.find("BeamPreset") is not None: - for value_name, value_path in 
BEAM_VALUES.items(): - match value_name: - case "beam_type": - step_info.beam_type = _parse_xml_text( - activity, f"BeamPreset/{value_path}", str - ) - case _: - step_info.__setattr__( - value_name, - _parse_xml_text( - activity, - f"BeamPreset/{value_path}", - _parse_measurement, - ), - ) - elif activity.find("MillingPreset") is not None: - for value_name, value_path in BEAM_VALUES.items(): - match value_name: - case "beam_type": - step_info.beam_type = _parse_xml_text( - activity, f"MillingPreset/{value_path}", str - ) - case _: - step_info.__setattr__( - value_name, - _parse_xml_text( - activity, - f"MillingPreset/{value_path}", - _parse_measurement, - ), - ) # Add info for current step to the site info model site_info.steps.__setattr__( MILLING_STEP_NAMES[unique_name], step_info From 5059f4eb50de53b106ebfe31221426de3c4e3034 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Fri, 17 Apr 2026 18:44:38 +0100 Subject: [PATCH 15/19] More compact rewrite of the stage position extraction logic --- src/murfey/client/contexts/fib.py | 29 +++++++++++------------------ 1 file changed, 11 insertions(+), 18 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 97ae0c2d..305b22f8 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -452,25 +452,18 @@ def _parse_autotem_metadata(self, file: Path): ) # Extract stage position information for all known stages in current site - stage_info = StagePositionInfo( - **{ - stage_name: StagePositionValues( - **{ - value_name: value - for value_name, value_path in STAGE_POSITION_VALUES.items() - if ( - value := _parse_xml_text( - stage, value_path, _parse_measurement - ) + site_info.stage_info = StagePositionInfo() + for stage_name, stage_path in STAGE_POSITION_NAMES.items(): + if (stage := site.find(stage_path)) is not None: + stage_values = StagePositionValues() + for value_name, value_path in STAGE_POSITION_VALUES.items(): + if ( + value := 
_parse_xml_text( + stage, value_path, _parse_measurement ) - is not None - } - ) - for stage_name, stage_path in STAGE_POSITION_NAMES.items() - if (stage := site.find(stage_path)) is not None - } - ) - site_info.stage_info = stage_info + ) is not None: + stage_values.__setattr__(value_name, value) + site_info.stage_info.__setattr__(stage_name, stage_values) # Find all Recipe nodes for the Site if not (recipes := site.findall("Workflow/Recipe")): From f269d7e6c78f729a47e893a2b6d93a79c8ac156b Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Mon, 20 Apr 2026 11:03:44 +0100 Subject: [PATCH 16/19] Make returns explicit --- src/murfey/client/contexts/fib.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 305b22f8..2c28fa0b 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -258,7 +258,7 @@ def post_transfer( super().post_transfer(transferred_file, environment=environment, **kwargs) if environment is None: logger.warning("No environment passed in") - return + return None # ----------------------------------------------------------------------------- # AutoTEM @@ -302,6 +302,7 @@ def post_transfer( # Update existing dict self._site_info[site_num] = site_info_new logger.info(f"Updating metadata for site {site_num}") + return None elif "DCImages" in parts and transferred_file.suffix == ".png": lamella_name = parts[parts.index("Sites") + 1] @@ -375,6 +376,7 @@ def post_transfer( visit_name=environment.visit, session_id=environment.murfey_session, ) + return None # ----------------------------------------------------------------------------- # Maps @@ -387,7 +389,7 @@ def post_transfer( ): if not (source := _get_source(transferred_file, environment)): logger.warning(f"No source found for file {transferred_file}") - return + return None if not ( destination_file := _file_transferred_to( environment=environment, @@ -401,11 +403,11 @@ def 
post_transfer( logger.warning( f"File {transferred_file.name!r} not found on storage system" ) - return + return None # Register image in database self._register_atlas(destination_file, environment) - return + return None # ----------------------------------------------------------------------------- # Meteor From fc7e1aa46f43b775eee26d880b0308e097a9566f Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Mon, 20 Apr 2026 12:15:14 +0100 Subject: [PATCH 17/19] Return empty dict instead of None for '_parse_autotem_metadata' function --- src/murfey/client/contexts/fib.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/murfey/client/contexts/fib.py b/src/murfey/client/contexts/fib.py index 2c28fa0b..6dca3f29 100644 --- a/src/murfey/client/contexts/fib.py +++ b/src/murfey/client/contexts/fib.py @@ -422,24 +422,24 @@ def _parse_autotem_metadata(self, file: Path): along with the configured milling steps and their completion status. """ + all_site_info: dict[int, LamellaSiteInfo] = {} try: root = ET.parse(file).getroot() except Exception: logger.warning(f"Error parsing file {str(file)}", exc_info=True) - return None + return all_site_info # Get the project name if (project_name := _parse_xml_text(root, ".//Project/Name", str)) is None: logger.warning("Metadata file has no project name") - return None + return all_site_info # Find all the Site nodes if not (sites := root.findall(".//Sites/Site")): logger.warning(f"No site information found in {str(file)}") - return None + return all_site_info # Iterate through Site nodes - all_site_info: dict[int, LamellaSiteInfo] = {} for site in sites: # Extract site name and number if (site_name := _parse_xml_text(site, "Name", str)) is None: From 3bc00378239cfe9e8b855b9131f00682c4017910 Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Mon, 20 Apr 2026 12:17:50 +0100 Subject: [PATCH 18/19] Added helper function to create test FIB AutoTEM metadata and added tests for '_post_transfer' for the 'ProjectData.dat' 
logic block --- tests/client/contexts/test_fib.py | 533 +++++++++++++++++++++++++++--- 1 file changed, 487 insertions(+), 46 deletions(-) diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index 88538b6d..5c271560 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -1,3 +1,4 @@ +import xml.etree.ElementTree as ET from pathlib import Path from unittest import mock from unittest.mock import MagicMock @@ -6,21 +7,336 @@ from pytest_mock import MockerFixture from murfey.client.contexts.fib import ( + MILLING_STEP_NAMES, + STAGE_POSITION_NAMES, + STAGE_POSITION_VALUES, FIBContext, _file_transferred_to, _get_source, _number_from_name, ) +# Mock session values +num_lamellae = 5 +visit_name = "cm12345-6" +project_name = visit_name.replace("-", "_") + + # ------------------------------------------------------------------------------------- # FIBContext test utilty functions and fixtures # ------------------------------------------------------------------------------------- -num_lamellae = 5 + + +def _create_milling_steps(): + # Create a dict with the milling steps sorted by the recipe + milling_steps: dict[str, list[str]] = {} + for key in MILLING_STEP_NAMES.keys(): + recipe, step = [s.strip() for s in key.split(" - ", 1)] + if not milling_steps.get(recipe, []): + milling_steps[recipe] = [step] + else: + milling_steps[recipe].append(step) + return milling_steps + + +milling_steps: dict[str, list[str]] = _create_milling_steps() + +# Test values to insert into the mock metadata +stage_values = { + "x": "3.15911143012073 mm", + "y": "-0.627002440038438 mm", + "z": "32.0781899453239 mm", + "rotation": "284.999355310423 °", + "tilt_alpha": "44.998223254214 °", +} @pytest.fixture def visit_dir(tmp_path: Path): - return tmp_path / "visit" + return tmp_path / visit_name + + +def _create_stage_position_node(stage_values: dict[str, str]): + stage_position_node = ET.Element("StagePosition") + for key, value in 
stage_values.items(): + node = ET.Element(STAGE_POSITION_VALUES[key]) + node.text = value + stage_position_node.append(node) + return stage_position_node + + +def _create_activity_node( + step: str, + recipe: str, + has_activity_name: bool = True, +): + activity_node = ET.Element("Activity") + if has_activity_name: + activity_name_node = ET.Element("Name") + activity_name_node.text = step + activity_node.append(activity_name_node) + + # Add common nodes + # Is step enabled? + enabled_node = ET.Element("IsEnabled") + enabled_node.text = "true" + activity_node.append(enabled_node) + + # Execution result + activity_metadata_node = ET.Element("ActivityMetadata") + execution_result_node = ET.Element("ExecutionResult") + execution_result_node.text = "Finished" + activity_metadata_node.append(execution_result_node) + activity_node.append(activity_metadata_node) + + # Execution time + execution_time_node = ET.Element("ExecutionTime") + execution_time_node.text = "200 s" + + # Add activity-sepcific nodes + # Activities with "MillingAngle" node + if step == "Milling Angle": + milling_angle_node = ET.Element("MillingAngle") + milling_angle_node.text = "12.0 °" + activity_node.append(milling_angle_node) + # Activities with "SiteLocationType" node + if step in ( + "Image Acquisition", + "Reference Definition", + "Reference Redefinition 1", + "Reference Redefinition 2", + "Rough Milling - Electron Image", + "Medium Milling - Electron Image", + "Fine Milling - Electron Image", + "Finer Milling - Electron Image", + "Polishing 1 - Electron Image", + "Polishing 2 - Ion Image", + "Polishing 2 - Electron Image", + ): + site_location_type_node = ET.Element("SiteLocationType") + site_location_type_node.text = "Chunk" if recipe == "Milling" else "Thinning" + activity_node.append(site_location_type_node) + # Nodes with beam information + if step in ( + "Artificial Features", + "Stress Relief Cuts", + "Rough Milling", + "Rough Milling - Electron Image", + "Medium Milling", + "Medium 
Milling - Electron Image", + "Fine Milling", + "Fine Milling - Electron Image", + "Finer Milling", + "Finer Milling - Electron Image", + "Polishing 1", + "Polishing 1 - Electron Image", + "Polishing 2", + "Polishing 2 - Ion Image", + "Polishing 2 - Electron Image", + ): + # BeamPreset parent node + beam_node_name = "BeamPreset" + if "image" not in step.lower(): + beam_node_name = "MillingPreset" + beam_node = ET.Element(beam_node_name) + + # Use different values for ion and electron images + beam_type = "Electron" + voltage = "2 kV" + current = "25 pA" + if "ion" in step.lower() or "image" not in step.lower(): + beam_type = "Ion" + voltage = "30 kV" + current = "30 pA" + + beam_type_node = ET.Element("BeamType") + beam_type_node.text = beam_type + beam_node.append(beam_type_node) + + voltage_node = ET.Element("HighVoltage") + voltage_node.text = voltage + beam_node.append(voltage_node) + + current_node = ET.Element("BeamCurrent") + current_node.text = current + beam_node.append(current_node) + + activity_node.append(beam_node) + + # Nodes with milling information + if step in ( + "Stress Relief Cuts", + "Rough Milling", + "Medium Milling", + "Fine Milling", + "Finer Milling", + "Polishing 1", + "Polishing 2", + ): + # All 7 have DepthCorrection node + depth_correction_node = ET.Element("DepthCorrection") + depth_correction_node.text = "3" + activity_node.append(depth_correction_node) + + # "Rough Milling" has TrenchHeight nodes + if step == "Rough Milling": + trench_height_front_node = ET.Element("FrontTrenchHeight") + trench_height_front_node.text = "2 μm" + activity_node.append(trench_height_front_node) + + trench_height_rear_node = ET.Element("RearTrenchHeight") + trench_height_rear_node.text = "8 μm" + activity_node.append(trench_height_rear_node) + + # "Stress Relief Cuts" does not have other fields + if step != "Stress Relief Cuts": + # OffsetFromLamella node + lamella_offset_node = ET.Element("OffsetFromLamella") + lamella_offset_node.text = "2 μm" + 
activity_node.append(lamella_offset_node) + + # LamellaFrontLeftWidthOverlap node + width_overlap_front_left_node = ET.Element("LamellaFrontLeftWidthOverlap") + width_overlap_front_left_node.text = "2 μm" + activity_node.append(width_overlap_front_left_node) + + # LamellaFrontRightWidthOverlap node + width_overlap_front_right_node = ET.Element("LamellaFrontRightWidthOverlap") + width_overlap_front_right_node.text = "2 μm" + activity_node.append(width_overlap_front_right_node) + + # LamellaRearLeftWidthOverlap node + width_overlap_rear_left_node = ET.Element("LamellaRearLeftWidthOverlap") + width_overlap_rear_left_node.text = "2 μm" + activity_node.append(width_overlap_rear_left_node) + + # LamellaRearRightWidthOverlap node + width_overlap_rear_right_node = ET.Element("LamellaRearRightWidthOverlap") + width_overlap_rear_right_node.text = "2 μm" + activity_node.append(width_overlap_rear_right_node) + + return activity_node + + +def _create_site_node( + site_num: int, + has_site_name: bool = True, + has_recipes: bool = True, + has_recipe_name: bool = True, + has_activities: bool = True, + has_activity_name: bool = True, +): + # Create the root Site node + site_node = ET.Element("Site") + + if has_site_name: + name_node = ET.Element("Name") + name_node.text = "Lamella" + if site_num > 1: + name_node.text += f" ({site_num})" + site_node.append(name_node) + + # Create the stage position nodes + parameters_node = ET.Element("Parameters") + for path in STAGE_POSITION_NAMES.values(): + inner_node: ET.Element | None = None + for n, part in enumerate(reversed(path.split("/"))): + # Create the stage position node + match part: + # Create the innermost StagePosition node + case "StagePosition" if n == 0: + inner_node = _create_stage_position_node( + stage_values=stage_values, + ) + # Append more than one inner node to Parameters node + case "Parameters": + if inner_node is not None: + parameters_node.append(inner_node) + # Append every other inner node to a new node + case _: + 
if inner_node is not None: + node = ET.Element(part) + node.append(inner_node) + inner_node = node + if inner_node is not None: + site_node.append(inner_node) + # Append Parameters node separately + site_node.append(parameters_node) + + # Create the recipe and activity nodes + workflow_node = ET.Element("Workflow") + if has_recipes: + for recipe, steps in milling_steps.items(): + # Create a Recipe node + recipe_node = ET.Element("Recipe") + if has_recipe_name: + recipe_name_node = ET.Element("Name") + recipe_name_node.text = recipe + recipe_node.append(recipe_name_node) + + # Iterate and create Activity nodes + if has_activities: + activities_node = ET.Element("Activities") + for step in steps: + activities_node.append( + _create_activity_node( + step, + recipe, + has_activity_name=has_activity_name, + ) + ) + recipe_node.append(activities_node) + + workflow_node.append(recipe_node) + site_node.append(workflow_node) + return site_node + + +def create_fib_autotem_project_data( + visit_dir: Path, + has_project_name: bool = True, + has_sites: bool = True, + has_site_name: bool = True, + has_recipes: bool = True, + has_recipe_name: bool = True, + has_activities: bool = True, + has_activity_name: bool = True, +): + # Create root structure + autotem_node = ET.Element("AutoTEM") + project_node = ET.Element("Project", {"Origin": "MAPS"}) + + if has_project_name: + project_name_node = ET.Element("Name") + project_name_node.text = project_name + project_node.append(project_name_node) + + site_parent_node = ET.Element("Sites") + if has_sites: + # Construct individual Site nodes + for n in reversed(range(num_lamellae)): + n += 1 + site_parent_node.append( + _create_site_node( + n, + has_site_name=has_site_name, + has_recipes=has_recipes, + has_recipe_name=has_recipe_name, + has_activities=has_activities, + has_activity_name=has_activity_name, + ) + ) + + project_node.append(site_parent_node) + autotem_node.append(project_node) + + # Save the mock XML file + file = visit_dir / 
"autotem/visit/ProjectData.dat" + file.parent.mkdir(parents=True, exist_ok=True) + tree = ET.ElementTree(autotem_node) + ET.indent(tree, space=" ") + tree.write(file, encoding="utf-8", xml_declaration=True) + return file @pytest.fixture @@ -115,10 +431,10 @@ def test_file_transferred_to( # Mock the environment mock_environment = MagicMock() mock_environment.default_destinations = {visit_dir: "current_year"} - mock_environment.visit = "visit" + mock_environment.visit = visit_name # Iterate across the FIB files to compare against - destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" + destination_dir = tmp_path / "fib" / "data" / "current_year" / visit_name for file in fib_maps_images: # Work out what the expected destination will be assert _file_transferred_to( @@ -131,22 +447,151 @@ def test_file_transferred_to( @pytest.mark.parametrize( "test_params", - ( # File type to test | Use environment? | Find source? | Find destination? - ("drift_correction", True, True, True), - ("drift_correction", False, True, True), - ("drift_correction", True, False, True), - ("drift_correction", True, True, False), + ( # Has environment | Has Project name | Has Site | Has Site name | Has Recipe | Has Recipe name | Has Activity | Has Activity name + # Pass case + (True, True, True, True, True, True, True, True), + # Only one of these should be False at a given time + (True, True, True, True, True, True, True, False), + (True, True, True, True, True, True, False, True), + (True, True, True, True, True, False, True, True), + (True, True, True, True, False, True, True, True), + (True, True, True, False, True, True, True, True), + (True, True, False, True, True, True, True, True), + (True, False, True, True, True, True, True, True), + (False, True, True, True, True, True, True, True), + ), +) +def test_fib_autotem_context_projectdata( + mocker: MockerFixture, + test_params: tuple[bool, bool, bool, bool, bool, bool, bool, bool], + tmp_path: Path, + visit_dir: Path, +): + 
# Unpack test params + ( + has_environment, + has_project_name, + has_sites, + has_site_name, + has_recipes, + has_recipe_name, + has_activities, + has_activity_name, + ) = test_params + + # Mock the environment + mock_environment = None + if has_environment: + mock_environment = MagicMock() + + # Mock the logger to check that specific logs are called + mock_logger = mocker.patch("murfey.client.contexts.fib.logger") + + # Mock the functions used in 'post_transfer' + mock_copy = mocker.patch("murfey.client.contexts.fib.shutil.copyfile") + mock_capture_post = mocker.patch("murfey.client.contexts.fib.capture_post") + + # Create the mock metadata file to parse + mock_projectdata = create_fib_autotem_project_data( + visit_dir=visit_dir, + has_project_name=has_project_name, + has_sites=has_sites, + has_site_name=has_site_name, + has_recipes=has_recipes, + has_recipe_name=has_recipe_name, + has_activities=has_activities, + has_activity_name=has_activity_name, + ) + + # Initialise the FIBContext + basepath = visit_dir + context = FIBContext( + acquisition_software="autotem", + basepath=basepath, + machine_config={}, + token="", + ) + context.post_transfer(mock_projectdata, environment=mock_environment) + + # Check the success case + if all( + ( + has_environment, + has_project_name, + has_sites, + has_site_name, + has_recipes, + has_recipe_name, + has_activities, + has_activity_name, + ) + ): + mock_copy.assert_called_once() + assert mock_capture_post.call_count == num_lamellae + assert len(context._site_info) == num_lamellae + for i in range(num_lamellae): + mock_logger.info.assert_any_call(f"Updating metadata for site {i + 1}") + # These fail cases will return an empty dict and not call "post_transfer" + if not has_environment: + mock_logger.warning.assert_called_with("No environment passed in") + mock_capture_post.assert_not_called() + elif not has_project_name: + mock_logger.warning.assert_called_with("Metadata file has no project name") + 
mock_capture_post.assert_not_called() + elif not has_sites: + mock_logger.warning.assert_called_with( + f"No site information found in {str(mock_projectdata)}" + ) + mock_capture_post.assert_not_called() + elif not has_site_name: + mock_logger.warning.assert_called_with("Current site doesn't have a name") + mock_capture_post.assert_not_called() + elif not has_recipes: + for i in range(num_lamellae): + site_name = "Lamella" + if i > 0: + site_name += f" ({i + 1})" + mock_logger.warning.assert_any_call( + f"No recipes found for site {site_name}" + ) + mock_capture_post.assert_not_called() + # These fail cases will produce LamellaSiteInfo dicts with default values + # "post_transfer" will still be called + elif not has_recipe_name: + mock_logger.warning.assert_any_call("Recipe doesn't have a name, skipping") + assert mock_capture_post.call_count == num_lamellae + elif not has_activities: + for recipe_name in milling_steps.keys(): + mock_logger.warning.assert_any_call( + f"Recipe {recipe_name} doesn't have any activities" + ) + assert mock_capture_post.call_count == num_lamellae + elif not has_activity_name: + for recipe_name in milling_steps.keys(): + mock_logger.warning.assert_any_call( + f"Activitiy in recipe {recipe_name} doesn't have a name, skipping" + ) + assert mock_capture_post.call_count == num_lamellae + + +@pytest.mark.parametrize( + "test_params", + ( # Use environment? | Find source? | Find destination? 
+ (True, True, True), + (False, True, True), + (True, False, True), + (True, True, False), ), ) -def test_fib_autotem_context( +def test_fib_autotem_context_drift_correction_images( mocker: MockerFixture, - test_params: tuple[str, bool, bool, bool], + test_params: tuple[bool, bool, bool], tmp_path: Path, visit_dir: Path, fib_autotem_dc_images: list[Path], ): # Unpack test params - file_type, use_env, find_source, find_dst = test_params + use_env, find_source, find_dst = test_params # Mock the environment mock_environment = None @@ -157,16 +602,23 @@ def test_fib_autotem_context( mock_logger = mocker.patch("murfey.client.contexts.fib.logger") # Create a list of destinations - destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" + destination_dir = tmp_path / "fib" / "data" / "current_year" / visit_name destination_files = [ destination_dir / file.relative_to(visit_dir) for file in fib_autotem_dc_images ] # Mock the functions used in 'post_transfer' mock_get_source = mocker.patch("murfey.client.contexts.fib._get_source") + mock_get_source.return_value = tmp_path if find_source else None + mock_file_transferred_to = mocker.patch( "murfey.client.contexts.fib._file_transferred_to" ) + if find_dst: + mock_file_transferred_to.side_effect = destination_files + else: + mock_file_transferred_to.return_value = None + mock_capture_post = mocker.patch("murfey.client.contexts.fib.capture_post") # Initialise the FIBContext @@ -178,38 +630,27 @@ def test_fib_autotem_context( token="", ) - match file_type: - case "drift_correction": - # Add case-specific return values and side-effects to the mocks - mock_get_source.return_value = tmp_path if find_source else None - if find_dst: - mock_file_transferred_to.side_effect = destination_files - else: - mock_file_transferred_to.return_value = None - - # Parse images one-by-one and check that expected calls were made - for file in fib_autotem_dc_images: - context.post_transfer(file, environment=mock_environment) - if not 
use_env: - mock_logger.warning.assert_called_with("No environment passed in") - elif not find_source: - mock_logger.warning.assert_called_with( - f"No source found for file {file}" - ) - elif not find_dst: - mock_logger.warning.assert_called_with( - f"File {file.name!r} not found on storage system" - ) - else: - mock_get_source.assert_called_with(file, mock_environment) - mock_file_transferred_to.assert_called_with( - environment=mock_environment, - source=basepath, - file_path=file, - rsync_basepath=Path(""), - ) - assert mock_capture_post.call_count == len(fib_autotem_dc_images) - assert len(context._drift_correction_images) == num_lamellae + # Parse images one-by-one and check that expected calls were made + for file in fib_autotem_dc_images: + context.post_transfer(file, environment=mock_environment) + if not use_env: + mock_logger.warning.assert_called_with("No environment passed in") + elif not find_source: + mock_logger.warning.assert_called_with(f"No source found for file {file}") + elif not find_dst: + mock_logger.warning.assert_called_with( + f"File {file.name!r} not found on storage system" + ) + else: + mock_get_source.assert_called_with(file, mock_environment) + mock_file_transferred_to.assert_called_with( + environment=mock_environment, + source=basepath, + file_path=file, + rsync_basepath=Path(""), + ) + assert mock_capture_post.call_count == len(fib_autotem_dc_images) + assert len(context._drift_correction_images) == num_lamellae def test_fib_maps_context( @@ -222,7 +663,7 @@ def test_fib_maps_context( mock_environment = MagicMock() # Create a list of destinations - destination_dir = tmp_path / "fib" / "data" / "current_year" / "visit" + destination_dir = tmp_path / "fib" / "data" / "current_year" / visit_name destination_files = [ destination_dir / file.relative_to(visit_dir) for file in fib_maps_images ] From 971781ee3d0ae8bf12f4a854b1d54369847482ef Mon Sep 17 00:00:00 2001 From: Eu Pin Tien Date: Mon, 20 Apr 2026 12:31:15 +0100 Subject: [PATCH 
19/19] Add unit test for the '_parse_boolean' function --- tests/client/contexts/test_fib.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/tests/client/contexts/test_fib.py b/tests/client/contexts/test_fib.py index 5c271560..3be203b4 100644 --- a/tests/client/contexts/test_fib.py +++ b/tests/client/contexts/test_fib.py @@ -14,6 +14,7 @@ _file_transferred_to, _get_source, _number_from_name, + _parse_boolean, ) # Mock session values @@ -407,6 +408,26 @@ def test_number_from_name(test_params: tuple[str, int]): assert _number_from_name(name) == number +@pytest.mark.parametrize( + "test_params", + ( # Input | Expected output + ("True", True), + ("true", True), + ("T", True), + ("t", True), + ("1", True), + ("False", False), + ("false", False), + ("F", False), + ("f", False), + ("0", False), + ), +) +def test_parse_boolean(test_params: tuple[str, bool]): + text, expected_result = test_params + assert _parse_boolean(text) == expected_result + + def test_get_source( tmp_path: Path, visit_dir: Path,