Coverage for openhcs/core/config_cache.py: 0.0%

124 statements  

« prev     ^ index     » next       coverage.py v7.10.7, created at 2025-10-01 18:33 +0000

1""" 

2Unified Global Configuration Cache System 

3 

4Provides shared configuration caching logic with pluggable execution strategies 

5for different UI frameworks (async for TUI, Qt threading for PyQt). 

6""" 

7 

8import logging 

9import dill as pickle 

10from abc import ABC, abstractmethod 

11from pathlib import Path 

12from typing import Optional, Callable, Any 

13from concurrent.futures import ThreadPoolExecutor 

14 

15from openhcs.core.config import GlobalPipelineConfig 

16 

17logger = logging.getLogger(__name__) 

18 

19 

class CacheExecutionStrategy(ABC):
    """Interface for pluggable cache I/O execution.

    Concrete strategies decide *how* the load/save work runs (e.g. on an
    asyncio executor for the TUI, or inline for Qt integration) while the
    cache itself stays agnostic of the UI framework.
    """

    @abstractmethod
    async def execute_load(self, cache_file: Path) -> Optional[GlobalPipelineConfig]:
        """Load a cached config from ``cache_file``; return None on miss/failure."""
        ...

    @abstractmethod
    async def execute_save(self, config: GlobalPipelineConfig, cache_file: Path) -> bool:
        """Persist ``config`` to ``cache_file``; return True on success."""
        ...

32 

33 

class AsyncExecutionStrategy(CacheExecutionStrategy):
    """Run cache I/O on the default asyncio thread-pool executor (TUI path)."""

    async def execute_load(self, cache_file: Path) -> Optional[GlobalPipelineConfig]:
        # Off-load the blocking disk read so the TUI event loop stays responsive.
        import asyncio
        return await asyncio.get_running_loop().run_in_executor(
            None, _sync_load_config, cache_file
        )

    async def execute_save(self, config: GlobalPipelineConfig, cache_file: Path) -> bool:
        # Same pattern for the blocking write.
        import asyncio
        return await asyncio.get_running_loop().run_in_executor(
            None, _sync_save_config, config, cache_file
        )

46 

47 

class QtExecutionStrategy(CacheExecutionStrategy):
    """Qt threading execution strategy for PyQt GUI."""

    def __init__(self, thread_pool=None):
        # NOTE(review): the pool is stored but never used by execute_load /
        # execute_save below — both run synchronously. Confirm whether Qt-side
        # callers submit work to this pool externally, or whether it (and the
        # two worker threads it spawns) can be dropped.
        self.thread_pool = thread_pool or ThreadPoolExecutor(max_workers=2)

    async def execute_load(self, cache_file: Path) -> Optional[GlobalPipelineConfig]:
        # Convert to sync for Qt integration
        # (async signature is kept only to satisfy CacheExecutionStrategy).
        return _sync_load_config(cache_file)

    async def execute_save(self, config: GlobalPipelineConfig, cache_file: Path) -> bool:
        # Convert to sync for Qt integration
        return _sync_save_config(config, cache_file)

61 

62 

63def _migrate_dataclass(cached_obj, target_type): 

64 """Recursively migrate dataclass with schema evolution.""" 

65 if not (hasattr(cached_obj, '__dataclass_fields__') and hasattr(target_type, '__dataclass_fields__')): 

66 return cached_obj 

67 

68 from dataclasses import fields 

69 preserved_values = {} 

70 for f in fields(target_type): 

71 if hasattr(cached_obj, f.name): 

72 old_value = getattr(cached_obj, f.name) 

73 preserved_values[f.name] = (_migrate_dataclass(old_value, f.type) 

74 if hasattr(f.type, '__dataclass_fields__') 

75 else old_value) 

76 return target_type(**preserved_values) 

77 

78 

def _sync_load_config(cache_file: Path) -> Optional[GlobalPipelineConfig]:
    """Load, migrate, and context-register a cached config (blocking).

    Returns the migrated ``GlobalPipelineConfig`` on success, or ``None`` when
    the cache file is absent, unreadable, or holds an unexpected object type.
    Never raises: all failures are logged and reported as ``None``.
    """
    try:
        if not cache_file.exists():
            return None

        with open(cache_file, 'rb') as f:
            cached_config = pickle.load(f)

        # Guard clause: anything that isn't a dataclass instance is rejected.
        if not hasattr(cached_config, '__dataclass_fields__'):
            logger.warning(f"Invalid config type in cache: {type(cached_config)}")
            return None

        logger.debug(f"Loaded cached config from: {cache_file}")
        migrated_config = _migrate_dataclass(cached_config, GlobalPipelineConfig)

        # CRITICAL FIX: Establish global config context after loading for proper placeholder resolution
        # This ensures that nested dataclass placeholders can resolve from the loaded GlobalPipelineConfig
        from openhcs.config_framework.lazy_factory import ensure_global_config_context
        ensure_global_config_context(GlobalPipelineConfig, migrated_config)
        logger.debug("Established global config context for loaded cached config")

        return migrated_config

    except pickle.PickleError as e:
        logger.warning(f"Failed to unpickle cached config: {e}")
        return None
    except Exception as e:
        logger.warning(f"Failed to load cached config: {e}")
        return None

109 

110 

def _sync_save_config(config: GlobalPipelineConfig, cache_file: Path) -> bool:
    """Pickle ``config`` to ``cache_file`` (blocking).

    Creates the cache directory on demand. Returns True on success; any
    failure is logged and reported as False rather than raised.
    """
    try:
        # Ensure cache directory exists
        cache_file.parent.mkdir(parents=True, exist_ok=True)
        cache_file.write_bytes(pickle.dumps(config))
        logger.debug(f"Saved config to cache: {cache_file}")
        return True
    except Exception as e:
        logger.error(f"Failed to save config to cache: {e}")
        return False

126 

127 

class UnifiedGlobalConfigCache:
    """Global configuration cache with pluggable execution strategies.

    The cache location and serialization logic are shared; the
    ``CacheExecutionStrategy`` decides whether operations run on an asyncio
    executor (TUI) or synchronously (PyQt), so both UIs reuse one code path.
    """

    def __init__(self, cache_file: Optional[Path] = None, strategy: Optional[CacheExecutionStrategy] = None):
        """Create a cache over ``cache_file`` (XDG default path when omitted)."""
        if cache_file is None:
            # Resolve the default location lazily to avoid an import cycle.
            from openhcs.core.xdg_paths import get_config_file_path
            cache_file = get_config_file_path("global_config.config")
        self.cache_file = cache_file
        self.strategy = strategy or AsyncExecutionStrategy()
        logger.debug(f"UnifiedGlobalConfigCache initialized with cache file: {self.cache_file}")

    async def load_cached_config(self) -> Optional[GlobalPipelineConfig]:
        """Load the cached global config, or None if unavailable."""
        return await self.strategy.execute_load(self.cache_file)

    async def save_config_to_cache(self, config: GlobalPipelineConfig) -> bool:
        """Write ``config`` to the cache file; True on success."""
        return await self.strategy.execute_save(config, self.cache_file)

    async def clear_cache(self) -> bool:
        """Remove the cache file if it exists; True unless removal failed."""
        try:
            if self.cache_file.exists():
                self.cache_file.unlink()
                logger.info(f"Cleared config cache: {self.cache_file}")
            return True
        except Exception as e:
            logger.error(f"Failed to clear config cache: {e}")
            return False

163 

164 

165# Global instance for easy access 

166_global_config_cache: Optional[UnifiedGlobalConfigCache] = None 

167 

168 

def get_global_config_cache(strategy: Optional[CacheExecutionStrategy] = None) -> UnifiedGlobalConfigCache:
    """Return the process-wide cache singleton.

    The singleton is (re)built on first use, or when a ``strategy`` is passed
    that differs from the one the current instance holds.
    """
    global _global_config_cache
    cache = _global_config_cache
    if cache is None or (strategy and cache.strategy != strategy):
        cache = UnifiedGlobalConfigCache(strategy=strategy)
        _global_config_cache = cache
    return cache

175 

176 

async def load_cached_global_config(strategy: Optional[CacheExecutionStrategy] = None) -> GlobalPipelineConfig:
    """
    Load global config with cache fallback.

    Args:
        strategy: Optional execution strategy (defaults to async)

    Returns:
        GlobalPipelineConfig (cached or default)
    """
    try:
        cached = await get_global_config_cache(strategy).load_cached_config()
        if cached is not None:
            logger.info("Using cached global configuration")

            # CRITICAL FIX: Establish global config context after loading for proper placeholder resolution
            # This ensures that nested dataclass placeholders can resolve from the loaded GlobalPipelineConfig
            from openhcs.config_framework.lazy_factory import ensure_global_config_context
            ensure_global_config_context(GlobalPipelineConfig, cached)
            logger.debug("Established global config context for loaded cached config")

            return cached
    except Exception as e:
        # Any failure (load or context setup) degrades to the default config.
        logger.warning(f"Failed to load cached config, using defaults: {e}")

    # Fallback to default config
    logger.info("Using default global configuration")
    default_config = GlobalPipelineConfig()

    # CRITICAL FIX: Also establish context for default config
    from openhcs.config_framework.lazy_factory import ensure_global_config_context
    ensure_global_config_context(GlobalPipelineConfig, default_config)

    return default_config

212 

213 

def load_cached_global_config_sync() -> GlobalPipelineConfig:
    """
    Synchronous version for startup scenarios.

    Returns:
        GlobalPipelineConfig (cached or default)
    """
    try:
        from openhcs.core.xdg_paths import get_config_file_path
        cached = _sync_load_config(get_config_file_path("global_config.config"))
        if cached is not None:
            logger.info("Using cached global configuration")
            # Note: _sync_load_config already establishes context for cached configs
            return cached
    except Exception as e:
        logger.warning(f"Failed to load cached config, using defaults: {e}")

    # Fallback to default config
    logger.info("Using default global configuration")
    default_config = GlobalPipelineConfig()

    # CRITICAL FIX: Also establish context for default config
    from openhcs.config_framework.lazy_factory import ensure_global_config_context
    ensure_global_config_context(GlobalPipelineConfig, default_config)

    return default_config