Coverage for openhcs/core/config.py: 80.5%
343 statements
« prev ^ index » next coverage.py v7.10.7, created at 2025-10-01 18:33 +0000
« prev ^ index » next coverage.py v7.10.7, created at 2025-10-01 18:33 +0000
1"""
2Global configuration dataclasses for OpenHCS.
4This module defines the primary configuration objects used throughout the application,
5such as VFSConfig, PathPlanningConfig, and the overarching GlobalPipelineConfig.
6Configuration is intended to be immutable and provided as Python objects.
7"""
9import logging
10import os # For a potentially more dynamic default for num_workers
11import threading
12import dataclasses
13from dataclasses import dataclass, field
14from pathlib import Path
15from typing import Literal, Optional, Union, Dict, Any, List, Type
16from enum import Enum
17from abc import ABC, abstractmethod
18from openhcs.constants import Microscope
19from openhcs.constants.constants import Backend
21# Import decorator for automatic decorator creation
22from openhcs.config_framework import auto_create_decorator
24# Import TilingLayout for TUI configuration
# textual-window normally provides TilingLayout; headless or minimal installs
# may not have it, so fall back to a local stand-in enum with the two members
# this module's defaults actually use.
try:
    from textual_window import TilingLayout
except ImportError:
    class TilingLayout(Enum):
        FLOATING = "floating"
        MASTER_DETAIL = "master_detail"


logger = logging.getLogger(__name__)
class ZarrCompressor(Enum):
    """Compression codecs supported for zarr storage."""

    BLOSC = "blosc"
    ZLIB = "zlib"
    LZ4 = "lz4"
    ZSTD = "zstd"
    NONE = "none"

    def create_compressor(self, compression_level: int, shuffle: bool = True) -> Optional[Any]:
        """Instantiate the zarr codec this member represents.

        Args:
            compression_level: Codec-specific level (1-22 for ZSTD, 1-9 for others).
            shuffle: Enable byte shuffling for better compression (blosc only).

        Returns:
            A configured zarr compressor instance, or None for no compression.
        """
        import zarr

        if self is ZarrCompressor.NONE:
            return None
        if self is ZarrCompressor.BLOSC:
            return zarr.Blosc(cname='lz4', clevel=compression_level, shuffle=shuffle)
        if self is ZarrCompressor.ZLIB:
            return zarr.Zlib(level=compression_level)
        if self is ZarrCompressor.LZ4:
            return zarr.LZ4(acceleration=compression_level)
        # Only ZSTD remains at this point.
        return zarr.Zstd(level=compression_level)
class ZarrChunkStrategy(Enum):
    """How chunk sizes are chosen for zarr arrays."""

    SINGLE = "single"   # one chunk per array (optimal for batch I/O)
    AUTO = "auto"       # let zarr decide chunk size
    CUSTOM = "custom"   # caller supplies chunk sizes
class MaterializationBackend(Enum):
    """Persistent-storage backends usable for materialization."""

    AUTO = "auto"
    ZARR = "zarr"
    DISK = "disk"
class WellFilterMode(Enum):
    """Whether a well filter selects wells in or out for materialization."""

    INCLUDE = "include"  # materialize only the specified wells
    EXCLUDE = "exclude"  # materialize every well except the specified ones
@auto_create_decorator
@dataclass(frozen=True)
class GlobalPipelineConfig:
    """
    Root configuration object for an OpenHCS pipeline session.
    This object is intended to be instantiated at application startup and treated as immutable.
    """
    num_workers: int = 1
    """Number of worker processes/threads for parallelizable tasks."""

    # NOTE(review): looks like a leftover debug/testing field — confirm whether
    # anything still depends on it (the config framework may surface it in the UI).
    test_str_field: str = "test"

    materialization_results_path: Path = Path("results")
    """
    Path for materialized analysis results (CSV, JSON files from special outputs).
    This is a pipeline-wide setting that controls where all special output materialization
    functions save their analysis results, regardless of which step produces them.
    Can be relative to plate folder or absolute path.
    Default: "results" creates a results/ folder in the plate directory.
    Examples: "results", "./analysis", "/data/analysis_results", "../shared_results"
    Note: This is separate from per-step image materialization, which is controlled
    by the sub_dir field in each step's step_materialization_config.
    """

    microscope: Microscope = Microscope.AUTO
    """Default microscope type for auto-detection."""

    # default_factory re-reads the environment at instantiation time, so late
    # changes to OPENHCS_USE_THREADING are picked up by new config instances.
    use_threading: bool = field(default_factory=lambda: os.getenv('OPENHCS_USE_THREADING', 'false').lower() == 'true')
    """Use ThreadPoolExecutor instead of ProcessPoolExecutor for debugging. Reads from OPENHCS_USE_THREADING environment variable."""

    # Future extension point:
    # logging_config: Optional[Dict[str, Any]] = None # For configuring logging levels, handlers
    # plugin_settings: Dict[str, Any] = field(default_factory=dict) # For plugin-specific settings


# PipelineConfig will be created automatically by the injection system
# (GlobalPipelineConfig → PipelineConfig by removing "Global" prefix)
def _headless_mode() -> bool:
    """Detect headless/CI contexts where viz deps should not be required at import time."""
    # Checked in order; the first truthy flag wins.
    headless_flags = ('CI', 'OPENHCS_CPU_ONLY', 'OPENHCS_HEADLESS')
    try:
        return any(os.getenv(flag, '').lower() == 'true' for flag in headless_flags)
    except Exception:
        # Fail closed: only explicit environment variables enable headless mode.
        return False
def _get_available_colormaps():
    """Return available colormap names via introspection: napari first, then matplotlib."""
    # Headless/CI/CPU-only contexts must not import viz libraries at module
    # import time; hand back a minimal stable set instead.
    if _headless_mode():
        return [
            'gray',
            'viridis',
        ]

    # Preferred source: napari's own colormap registry.
    try:
        from napari.utils.colormaps import AVAILABLE_COLORMAPS
    except ImportError:
        pass
    else:
        return list(AVAILABLE_COLORMAPS.keys())

    # Fallback source: matplotlib's registered colormaps.
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        pass
    else:
        return list(plt.colormaps())

    # No viz library available: fail loud rather than ship a hardcoded list.
    raise ImportError("Neither napari nor matplotlib colormaps are available. Install napari or matplotlib.")
def _create_colormap_enum():
    """Create a dynamic enum for available colormaps using pure introspection."""
    available = _get_available_colormaps()
    if not available:
        raise ValueError("No colormaps available for enum creation")

    members = {}
    for cmap_name in available:
        # Normalize the colormap name into a valid Python identifier.
        identifier = cmap_name.replace(' ', '_').replace('-', '_').replace('.', '_').upper()
        # Identifiers may not start with a digit.
        if identifier and identifier[0].isdigit():
            identifier = f"CMAP_{identifier}"
        # Keep only names that survived normalization cleanly.
        if identifier and identifier.replace('_', '').replace('CMAP', '').isalnum():
            members[identifier] = cmap_name

    if not members:
        raise ValueError("No valid colormap identifiers could be created")

    # Functional Enum API: member names map to the original colormap strings.
    return Enum('NapariColormap', members)


# Built at import time so the enum reflects whatever viz stack is installed.
NapariColormap = _create_colormap_enum()
class NapariDimensionMode(Enum):
    """How a dimension is rendered in napari."""

    SLICE = "slice"  # show as a 2D slice (take middle slice)
    STACK = "stack"  # show as a 3D stack/volume
def _create_napari_display_config():
    """Dynamically create NapariDisplayConfig with component-specific fields.

    Builds a frozen-dataclass type with one ``{component}_mode`` field per
    variable component plus a ``colormap`` field, so the UI can expose each
    dimension's display mode individually.
    """
    # Define components locally to avoid circular import
    from enum import Enum

    # NOTE(review): this local enum mirrors a VariableComponents enum presumably
    # defined elsewhere in the project — keep the member list in sync.
    class VariableComponents(Enum):
        SITE = "site"
        CHANNEL = "channel"
        Z_INDEX = "z_index"
        WELL = "well"

    variable_components = list(VariableComponents)

    # Create field annotations and defaults
    annotations = {
        'colormap': NapariColormap,
    }
    defaults = {
        # 'gray' is always present, even in the minimal headless colormap set,
        # so NapariColormap.GRAY is a safe default.
        'colormap': NapariColormap.GRAY,
    }

    # Add dynamic component mode fields
    for component in variable_components:
        field_name = f"{component.value}_mode"
        annotations[field_name] = NapariDimensionMode
        # Default: channel=SLICE (separate 2D slices), everything else=STACK (3D volumes)
        default_mode = NapariDimensionMode.SLICE if component.value == 'channel' else NapariDimensionMode.STACK
        defaults[field_name] = default_mode

    # Create the class dynamically
    def __init__(self, **kwargs):
        # Set defaults for any missing fields
        for field_name, default_value in defaults.items():
            if field_name not in kwargs:
                kwargs[field_name] = default_value

        # object.__setattr__ is required because the class is frozen.
        for field_name, value in kwargs.items():
            object.__setattr__(self, field_name, value)

    def get_dimension_mode(self, component) -> NapariDimensionMode:
        """Get the dimension mode for a given component."""
        # Handle enum components, component names, or string values
        if hasattr(component, 'value'):
            component_value = component.value
        elif hasattr(component, 'name'):
            component_value = component.name.lower()
        else:
            # Handle string input
            component_value = str(component).lower()

        # Look up the corresponding field
        field_name = f"{component_value}_mode"
        mode = getattr(self, field_name, None)

        # Handle None values from inheritance system - use defaults
        if mode is None:
            # Default: channel=SLICE (separate 2D slices), everything else=STACK (3D volumes)
            return NapariDimensionMode.SLICE if component_value == 'channel' else NapariDimensionMode.STACK

        return mode

    def get_colormap_name(self) -> str:
        """Get the string name of the colormap for serialization."""
        return self.colormap.value

    # Create class attributes
    class_attrs = {
        '__annotations__': annotations,
        '__init__': __init__,
        'get_dimension_mode': get_dimension_mode,
        'get_colormap_name': get_colormap_name,
        '__doc__': """Configuration for napari display behavior for all OpenHCS components.

    This class is dynamically generated with individual fields for each variable component.
    Each component has a corresponding {component}_mode field that controls whether
    it's displayed as a slice or stack in napari.
    """,
    }

    # Add default values as class attributes for dataclass compatibility
    for field_name, default_value in defaults.items():
        class_attrs[field_name] = default_value

    # Create the class
    NapariDisplayConfig = type('NapariDisplayConfig', (), class_attrs)

    # Make it a frozen dataclass (uses the custom __init__ above, since
    # dataclass() does not overwrite an existing __init__... TODO confirm —
    # dataclass normally generates __init__; verify the custom one survives).
    NapariDisplayConfig = dataclass(frozen=True)(NapariDisplayConfig)

    return NapariDisplayConfig


# Create the dynamic class
NapariDisplayConfig = _create_napari_display_config()

# Apply the global pipeline config decorator.
# NOTE(review): `global_pipeline_config` is not imported here — presumably it is
# injected into this module's namespace by @auto_create_decorator above; verify.
NapariDisplayConfig = global_pipeline_config(NapariDisplayConfig)
@global_pipeline_config
@dataclass(frozen=True)
class WellFilterConfig:
    """Base configuration for well filtering functionality.

    Subclassed by path-planning and step-level configs that need well selection.
    """
    well_filter: Optional[Union[List[str], str, int]] = None
    """Well filter specification: list of wells, pattern string, or max count integer. None means all wells."""

    well_filter_mode: WellFilterMode = WellFilterMode.INCLUDE
    """Whether well_filter is an include list or exclude list."""
@global_pipeline_config
@dataclass(frozen=True)
class ZarrConfig:
    """Configuration for Zarr storage backend."""
    store_name: str = "images"
    """Name of the zarr store directory."""

    compressor: ZarrCompressor = ZarrCompressor.ZLIB
    """Compression algorithm to use."""

    # NOTE(review): for ZarrCompressor.LZ4 this value is passed as `acceleration`,
    # where higher typically means faster/*less* compression — confirm the
    # "higher = more compression" wording below holds for every codec.
    compression_level: int = 3
    """Compression level (1-9 for LZ4, higher = more compression)."""

    shuffle: bool = True
    """Enable byte shuffling for better compression (blosc only)."""

    chunk_strategy: ZarrChunkStrategy = ZarrChunkStrategy.SINGLE
    """Chunking strategy for zarr arrays."""

    ome_zarr_metadata: bool = True
    """Generate OME-ZARR compatible metadata and structure."""

    write_plate_metadata: bool = True
    """Write plate-level metadata for HCS viewing (required for OME-ZARR viewers like napari)."""
@global_pipeline_config
@dataclass(frozen=True)
class VFSConfig:
    """Configuration for Virtual File System (VFS) related operations."""
    read_backend: Backend = Backend.AUTO
    """Backend for reading input data. AUTO uses metadata-based detection for OpenHCS plates."""

    intermediate_backend: Backend = Backend.MEMORY
    """Backend for storing intermediate step results that are not explicitly materialized."""

    materialization_backend: MaterializationBackend = MaterializationBackend.DISK
    """Backend for explicitly materialized outputs (e.g., final results, user-requested saves)."""
@global_pipeline_config
@dataclass(frozen=True)
class AnalysisConsolidationConfig:
    """Configuration for automatic analysis results consolidation."""
    enabled: bool = True
    """Whether to automatically run analysis consolidation after pipeline completion."""

    metaxpress_style: bool = True
    """Whether to generate MetaXpress-compatible output format with headers."""

    well_pattern: str = r"([A-Z]\d{2})"
    """Regex pattern for extracting well IDs from filenames."""

    file_extensions: tuple[str, ...] = (".csv",)
    """File extensions to include in consolidation."""

    # Patterns guard against re-consolidating our own generated outputs.
    exclude_patterns: tuple[str, ...] = (r".*consolidated.*", r".*metaxpress.*", r".*summary.*")
    """Filename patterns to exclude from consolidation."""

    output_filename: str = "metaxpress_style_summary.csv"
    """Name of the consolidated output file."""
@global_pipeline_config
@dataclass(frozen=True)
class PlateMetadataConfig:
    """Configuration for plate metadata in MetaXpress-style output.

    All None defaults are auto-derived at output time from the plate name/path.
    """
    barcode: Optional[str] = None
    """Plate barcode. If None, will be auto-generated from plate name."""

    plate_name: Optional[str] = None
    """Plate name. If None, will be derived from plate path."""

    plate_id: Optional[str] = None
    """Plate ID. If None, will be auto-generated."""

    description: Optional[str] = None
    """Experiment description. If None, will be auto-generated."""

    acquisition_user: str = "OpenHCS"
    """User who acquired the data."""

    z_step: str = "1"
    """Z-step information for MetaXpress compatibility."""
@global_pipeline_config
@dataclass(frozen=True)
class ExperimentalAnalysisConfig:
    """Configuration for experimental analysis system."""
    enabled: bool = True
    """Whether experimental analysis is enabled."""

    config_file_name: str = "config.xlsx"
    """Name of the experimental configuration Excel file."""

    design_sheet_name: str = "drug_curve_map"
    """Name of the sheet containing experimental design."""

    plate_groups_sheet_name: str = "plate_groups"
    """Name of the sheet containing plate group mappings."""

    normalization_method: str = "fold_change"
    """Normalization method: fold_change, z_score, percent_control."""

    export_raw_results: bool = True
    """Whether to export raw (non-normalized) results."""

    export_heatmaps: bool = True
    """Whether to generate heatmap visualizations."""

    auto_detect_format: bool = True
    """Whether to automatically detect microscope format."""

    default_format: Optional[str] = None
    """Default format to use if auto-detection fails."""

    enable_wells_exclusion: bool = True
    """Whether to support wells exclusion from analysis (via 'Exclude Wells' row in config)."""

    metaxpress_summary_enabled: bool = True
    """Whether to generate MetaXpress-style summary output by default."""
@global_pipeline_config
@dataclass(frozen=True)
class PathPlanningConfig(WellFilterConfig):
    """
    Configuration for pipeline path planning and directory structure.

    This class handles path construction concerns including plate root directories,
    output directory suffixes, and subdirectory organization. It does not handle
    analysis results location, which is controlled at the pipeline level
    (see GlobalPipelineConfig.materialization_results_path).

    Inherits well filtering functionality from WellFilterConfig.
    """
    output_dir_suffix: str = "_openhcs"
    """Default suffix for general step output directories."""

    global_output_folder: Optional[Path] = None
    """
    Optional global output folder where all plate workspaces and outputs will be created.
    If specified, plate workspaces will be created as {global_output_folder}/{plate_name}_workspace/
    and outputs as {global_output_folder}/{plate_name}_workspace_outputs/.
    If None, uses the current behavior (workspace and outputs in same directory as plate).
    Example: "/data/results" or "/mnt/hcs_output"
    """

    sub_dir: str = "images"
    """
    Subdirectory within plate folder for storing processed data.
    Examples: "images", "processed", "data/images"
    """
@global_pipeline_config
@dataclass(frozen=True)
class StepWellFilterConfig(WellFilterConfig):
    """Well filter configuration specialized for step-level configs with different defaults."""
    # Override: step-level configs default to a single well (max-count of 1)
    # instead of WellFilterConfig's None (all wells).
    well_filter: Optional[Union[List[str], str, int]] = 1
@global_pipeline_config
@dataclass(frozen=True)
class StepMaterializationConfig(StepWellFilterConfig, PathPlanningConfig):
    """
    Configuration for per-step materialization - configurable in UI.

    This dataclass appears in the UI like any other configuration, allowing users
    to set pipeline-level defaults for step materialization behavior. All step
    materialization instances will inherit these defaults unless explicitly overridden.

    Uses multiple inheritance from StepWellFilterConfig and PathPlanningConfig.
    """

    # Override sub_dir for materialization-specific default
    sub_dir: str = "checkpoints"
    """Subdirectory for materialized outputs (different from global 'images')."""
@global_pipeline_config
@dataclass(frozen=True)
class FunctionRegistryConfig:
    """Configuration for function registry behavior across all libraries."""
    enable_scalar_functions: bool = True
    """
    Whether to register functions that return scalars.
    When True: Scalar-returning functions are wrapped as (array, scalar) tuples.
    When False: Scalar-returning functions are filtered out entirely.
    Applies uniformly to all libraries (CuPy, scikit-image, pyclesperanto).
    """
@global_pipeline_config
@dataclass(frozen=True)
class VisualizerConfig:
    """Configuration for shared visualization system settings."""
    temp_directory: Optional[Path] = None
    """Directory for temporary visualization files. If None, will auto-create in system temp."""
@global_pipeline_config
@dataclass(frozen=True)
class StreamingDefaults:
    """Default configuration for streaming to visualizers."""
    persistent: bool = True
    """Whether viewer stays open after pipeline completion."""
@global_pipeline_config(ui_hidden=True)
@dataclass(frozen=True)
class StreamingConfig(StepWellFilterConfig, StreamingDefaults, ABC):
    """Abstract base configuration for streaming to visualizers.

    Uses multiple inheritance from StepWellFilterConfig and StreamingDefaults.
    Inherited fields are automatically set to None by @global_pipeline_config(inherit_as_none=True).
    """
    # NOTE(review): the docstring above mentions inherit_as_none=True, but the
    # decorator call here passes ui_hidden=True — confirm which keyword the
    # config framework actually expects.

    @property
    @abstractmethod
    def backend(self) -> Backend:
        """Backend enum for this streaming type."""
        pass

    @property
    @abstractmethod
    def step_plan_output_key(self) -> str:
        """Key to use in step_plan for this config's output paths."""
        pass

    @abstractmethod
    def get_streaming_kwargs(self, global_config) -> dict:
        """Return kwargs needed for this streaming backend."""
        pass

    @abstractmethod
    def create_visualizer(self, filemanager, visualizer_config):
        """Create and return the appropriate visualizer for this streaming config."""
        pass
@global_pipeline_config
@dataclass(frozen=True)
class NapariStreamingConfig(StreamingConfig, NapariDisplayConfig):
    """Streaming configuration for the napari viewer backend."""

    napari_port: int = 5555
    """Port for napari streaming communication."""

    @property
    def backend(self) -> Backend:
        return Backend.NAPARI_STREAM

    @property
    def step_plan_output_key(self) -> str:
        return "napari_streaming_paths"

    def get_streaming_kwargs(self, context) -> dict:
        # This config is also the display config, so it passes itself along.
        streaming_kwargs = {
            "napari_port": self.napari_port,
            "display_config": self,
        }
        # Component parsing needs the microscope handler when a context exists.
        if context:
            streaming_kwargs["microscope_handler"] = context.microscope_handler
        return streaming_kwargs

    def create_visualizer(self, filemanager, visualizer_config):
        # Imported lazily so headless environments never load napari code.
        from openhcs.runtime.napari_stream_visualizer import NapariStreamVisualizer

        return NapariStreamVisualizer(
            filemanager,
            visualizer_config,
            viewer_title="OpenHCS Pipeline Visualization",
            persistent=self.persistent,
            napari_port=self.napari_port,
            display_config=self,  # self doubles as the display config
        )
@global_pipeline_config
@dataclass(frozen=True)
class FijiStreamingConfig(StreamingConfig):
    """Streaming configuration for the Fiji/ImageJ viewer backend."""

    fiji_executable_path: Optional[Path] = None
    """Path to Fiji/ImageJ executable. If None, will auto-detect."""

    @property
    def backend(self) -> Backend:
        return Backend.FIJI_STREAM

    @property
    def step_plan_output_key(self) -> str:
        return "fiji_streaming_paths"

    def get_streaming_kwargs(self, global_config) -> dict:
        return {"fiji_executable_path": self.fiji_executable_path}

    def create_visualizer(self, filemanager, visualizer_config):
        # Imported lazily so Fiji support is only loaded when actually used.
        from openhcs.runtime.fiji_stream_visualizer import FijiStreamVisualizer

        return FijiStreamVisualizer(
            filemanager,
            viewer_title="OpenHCS Fiji Visualization",
            visualizer_config=visualizer_config,
            persistent=self.persistent,
        )
@dataclass(frozen=True)
class TilingKeybinding:
    """Declarative binding of one key combination to an existing window-manager method."""

    key: str          # key combination, e.g. "ctrl+j"
    action: str       # name of a method that already exists on the window manager
    description: str  # human-readable label


@dataclass(frozen=True)
class TilingKeybindings:
    """Declarative mapping of all tiling keybindings to existing methods."""

    # --- Focus ---
    focus_next: TilingKeybinding = TilingKeybinding("ctrl+j", "focus_next_window", "Focus Next Window")
    focus_prev: TilingKeybinding = TilingKeybinding("ctrl+k", "focus_previous_window", "Focus Previous Window")

    # --- Layouts (wrapper methods) ---
    horizontal_split: TilingKeybinding = TilingKeybinding("ctrl+shift+h", "set_horizontal_split", "Horizontal Split")
    vertical_split: TilingKeybinding = TilingKeybinding("ctrl+shift+v", "set_vertical_split", "Vertical Split")
    grid_layout: TilingKeybinding = TilingKeybinding("ctrl+shift+g", "set_grid_layout", "Grid Layout")
    master_detail: TilingKeybinding = TilingKeybinding("ctrl+shift+m", "set_master_detail", "Master Detail")
    toggle_floating: TilingKeybinding = TilingKeybinding("ctrl+shift+f", "toggle_floating", "Toggle Floating")

    # --- Window movement (extracted window_manager methods) ---
    move_window_prev: TilingKeybinding = TilingKeybinding("ctrl+shift+left", "move_focused_window_prev", "Move Window Left")
    move_window_next: TilingKeybinding = TilingKeybinding("ctrl+shift+right", "move_focused_window_next", "Move Window Right")
    rotate_left: TilingKeybinding = TilingKeybinding("ctrl+alt+left", "rotate_window_order_left", "Rotate Windows Left")
    rotate_right: TilingKeybinding = TilingKeybinding("ctrl+alt+right", "rotate_window_order_right", "Rotate Windows Right")

    # --- Gaps ---
    gap_increase: TilingKeybinding = TilingKeybinding("ctrl+plus", "gap_increase", "Increase Gap")
    gap_decrease: TilingKeybinding = TilingKeybinding("ctrl+minus", "gap_decrease", "Decrease Gap")

    # --- Bulk operations ---
    minimize_all: TilingKeybinding = TilingKeybinding("ctrl+shift+d", "minimize_all_windows", "Minimize All")
    open_all: TilingKeybinding = TilingKeybinding("ctrl+shift+o", "open_all_windows", "Open All")
@dataclass(frozen=True)
class TUIConfig:
    """Configuration for OpenHCS Textual User Interface."""
    default_tiling_layout: TilingLayout = TilingLayout.MASTER_DETAIL
    """Default tiling layout for window manager on startup."""

    default_window_gap: int = 1
    """Default gap between windows in tiling mode (in characters)."""

    enable_startup_notification: bool = True
    """Whether to show notification about tiling mode on startup."""

    # default_factory is required: a dataclass instance default would otherwise
    # be shared declaration-time state across TUIConfig instances.
    keybindings: TilingKeybindings = field(default_factory=TilingKeybindings)
    """Declarative mapping of all tiling keybindings."""
# Inject all accumulated fields at the end of module loading; imported here
# (not at the top) so every config class above is already registered.
from openhcs.config_framework.lazy_factory import _inject_all_pending_fields
_inject_all_pending_fields()


# ============================================================================
# Configuration Framework Initialization
# ============================================================================

# Initialize configuration framework with OpenHCS types
from openhcs.config_framework import set_base_config_type

set_base_config_type(GlobalPipelineConfig)

# Note: We use the framework's default MRO-based priority function.
# More derived classes automatically get higher priority through MRO depth.
# No custom priority function needed - the framework handles it generically.

logger.debug("Configuration framework initialized with OpenHCS types")