Coverage for openhcs/ui/shared/pattern_file_service.py: 12.6%

81 statements  

« prev     ^ index     » next       coverage.py v7.11.0, created at 2025-11-04 02:09 +0000

1""" 

2Pattern File Service - Framework-agnostic file I/O for function patterns. 

3 

4This service handles loading/saving .func files with async safety. 

5Does NOT include external editor integration (framework-specific). 

6""" 

7 

8import asyncio 

9import dill as pickle 

10import logging 

11from pathlib import Path 

12from typing import Union, List, Dict, Optional 

13 

14logger = logging.getLogger(__name__) 

15 

16 

class PatternFileService:
    """
    Framework-agnostic async-safe file I/O operations for function patterns.

    Handles .func file loading/saving with proper async safety.
    Does NOT include external editor integration - that's framework-specific.
    """

    async def load_pattern_from_file(self, file_path: Path) -> Union[List, Dict]:
        """
        Load and validate .func files with async safety.

        Uses run_in_executor so blocking disk I/O never stalls the event loop.

        Args:
            file_path: Path to .func file

        Returns:
            Loaded pattern (List or Dict)

        Raises:
            FileNotFoundError: If file doesn't exist
            ValueError: If file content is invalid (not a pickle, or not a list/dict)
            Exception: For other loading errors
        """
        def _sync_load_pattern(path: Path) -> Union[List, Dict]:
            """Synchronous pattern loading for executor."""
            if not path.exists():
                raise FileNotFoundError(f"Pattern file not found: {path}")

            if not path.is_file():
                raise ValueError(f"Path is not a file: {path}")

            try:
                # SECURITY: unpickling executes arbitrary code — only load
                # .func files from trusted sources.
                with open(path, "rb") as f:
                    pattern = pickle.load(f)
            except pickle.PickleError as e:
                raise ValueError(f"Failed to unpickle pattern file: {e}") from e
            except Exception as e:
                raise Exception(f"Failed to load pattern file: {e}") from e

            # Basic validation. NOTE: this check is deliberately OUTSIDE the
            # try block above — the docstring promises ValueError for invalid
            # content, and keeping it inside would re-wrap it as a bare
            # Exception (the original bug).
            if not isinstance(pattern, (list, dict)):
                raise ValueError(f"Invalid pattern type: {type(pattern)}. Expected list or dict.")

            return pattern

        # asyncio.get_running_loop() instead of deprecated get_event_loop().
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _sync_load_pattern, file_path)

    async def save_pattern_to_file(self, pattern: Union[List, Dict], file_path: Path) -> None:
        """
        Save patterns with pickle using async safety.

        Uses run_in_executor so blocking disk I/O never stalls the event loop.
        Parent directories are created if missing.

        Args:
            pattern: Pattern to save (List or Dict)
            file_path: Path to save to

        Raises:
            ValueError: If pattern is invalid
            Exception: For saving errors
        """
        def _sync_save_pattern(pattern_data: Union[List, Dict], path: Path) -> None:
            """Synchronous pattern saving for executor."""
            # Basic validation — fail before touching the filesystem.
            if not isinstance(pattern_data, (list, dict)):
                raise ValueError(f"Invalid pattern type: {type(pattern_data)}. Expected list or dict.")

            # Ensure parent directory exists
            path.parent.mkdir(parents=True, exist_ok=True)

            try:
                with open(path, "wb") as f:
                    pickle.dump(pattern_data, f)
            except Exception as e:
                raise Exception(f"Failed to save pattern file: {e}") from e

        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, _sync_save_pattern, pattern, file_path)

    async def validate_pattern_file(self, file_path: Path) -> tuple[bool, Optional[str]]:
        """
        Validate .func file without loading it completely.

        Cheap checks only: existence, regular-file, extension, and the pickle
        protocol-2+ magic byte. Does NOT unpickle the file.

        Args:
            file_path: Path to validate

        Returns:
            Tuple of (is_valid, error_message); error_message is None when valid.
        """
        def _sync_validate_file(path: Path) -> tuple[bool, Optional[str]]:
            """Synchronous file validation for executor."""
            if not path.exists():
                return False, f"File does not exist: {path}"

            if not path.is_file():
                return False, f"Path is not a file: {path}"

            if path.suffix != '.func':
                return False, f"File does not have .func extension: {path}"

            try:
                # Read just the first bytes to check the pickle format marker
                # without deserializing (which would execute code).
                with open(path, "rb") as f:
                    header = f.read(10)
                    if not header.startswith(b'\x80'):  # Pickle protocol marker
                        return False, "File is not a valid pickle file"

                return True, None

            except Exception as e:
                return False, f"File validation failed: {e}"

        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, _sync_validate_file, file_path)

    def get_default_save_path(self, base_name: str = "pattern") -> str:
        """
        Get default save path for .func files.

        Args:
            base_name: Base filename without extension

        Returns:
            Default save path string
        """
        return f"{base_name}.func"

    def ensure_func_extension(self, file_path: str) -> str:
        """
        Ensure file path has .func extension.

        Args:
            file_path: Original file path

        Returns:
            File path with .func extension (replaces any other extension)
        """
        path = Path(file_path)
        if path.suffix != '.func':
            return str(path.with_suffix('.func'))
        return file_path

    async def backup_pattern_file(self, file_path: Path) -> Optional[Path]:
        """
        Create backup of existing pattern file before overwriting.

        Best-effort: failures are logged as warnings and reported as None
        rather than raised, so a failed backup never blocks a save.

        Args:
            file_path: Original file path

        Returns:
            Backup file path if created, None if no backup needed or it failed
        """
        if not file_path.exists():
            return None

        def _sync_backup_file(original_path: Path) -> Path:
            """Synchronous file backup for executor."""
            # Local imports: these modules are only needed on this rare path.
            import shutil
            import time

            backup_path = original_path.with_suffix(f"{original_path.suffix}.backup")

            # If backup already exists, add timestamp to avoid clobbering it.
            if backup_path.exists():
                timestamp = int(time.time())
                backup_path = original_path.with_suffix(f"{original_path.suffix}.backup.{timestamp}")

            # copy2 preserves metadata (mtime etc.) along with contents.
            shutil.copy2(original_path, backup_path)
            return backup_path

        try:
            loop = asyncio.get_running_loop()
            backup_path = await loop.run_in_executor(None, _sync_backup_file, file_path)
            return backup_path
        except Exception as e:
            logger.warning(f"Failed to create backup for {file_path}: {e}")
            return None

201