Coverage for openhcs/textual_tui/__main__.py: 0.0%

120 statements  

« prev     ^ index     » next       coverage.py v7.10.3, created at 2025-08-14 05:57 +0000

1""" 

2OpenHCS Textual TUI Entry Point 

3 

4Entry point for the OpenHCS Textual TUI application. 

5Replicates the existing pattern from the prompt-toolkit TUI. 

6""" 

7 

8import argparse 

9import asyncio 

10import logging 

11import multiprocessing 

12import sys 

13import subprocess 

14import tempfile 

15from pathlib import Path 

16 

17from openhcs.core.config import get_default_global_config 

18from openhcs.core.orchestrator.gpu_scheduler import setup_global_gpu_registry 

19 

20from .app import OpenHCSTUIApp 

21 

22 

23def _parse_command_line_arguments(): 

24 """Parse command line arguments.""" 

25 parser = argparse.ArgumentParser( 

26 description="OpenHCS Textual TUI - Modern Terminal User Interface" 

27 ) 

28 

29 parser.add_argument( 

30 "--debug", 

31 action="store_true", 

32 help="Enable debug logging" 

33 ) 

34 

35 parser.add_argument( 

36 "--workspace", 

37 type=str, 

38 default=None, 

39 help="Workspace directory for outputs (optional)" 

40 ) 

41 

42 parser.add_argument( 

43 "--web", 

44 action="store_true", 

45 help="Serve the TUI via textual-web instead of running locally" 

46 ) 

47 

48 return parser.parse_args() 

49 

50 

51def _serve_web(): 

52 """Serve the TUI via textual-serve.""" 

53 try: 

54 # Check if textual-serve is installed 

55 import textual_serve 

56 print("✅ textual-serve found") 

57 except ImportError: 

58 print("❌ textual-serve is not installed!") 

59 print("📦 Install it with: pip install textual-serve") 

60 sys.exit(1) 

61 

62 try: 

63 print("🌐 Starting OpenHCS web server...") 

64 print("🔗 Your TUI will be available at: http://localhost:8000") 

65 print("📝 Share this URL to give others access to your OpenHCS TUI") 

66 print("⚠️ Note: The TUI runs on YOUR machine, others just see it in their browser") 

67 print() 

68 

69 # Use textual-serve to serve the TUI 

70 from textual_serve.server import Server 

71 

72 server = Server( 

73 command="python -m openhcs.textual_tui", 

74 host="localhost", 

75 port=8000, 

76 title="OpenHCS - High-Content Screening Platform" 

77 ) 

78 

79 server.serve() 

80 

81 except KeyboardInterrupt: 

82 print("\n🛑 Web server stopped by user") 

83 except Exception as e: 

84 print(f"❌ Error running textual-serve: {e}") 

85 sys.exit(1) 

86 

87 

def _setup_logging(debug: bool = False) -> logging.Logger:
    """Setup unified logging configuration for entire OpenHCS system.

    Routes ALL log output (root logger) to a single timestamped file under
    ``~/.local/share/openhcs/logs`` and suppresses console logging so it
    cannot corrupt the TUI display.

    Args:
        debug: When True, log at DEBUG level; otherwise INFO.

    Returns:
        The ``openhcs.main`` logger, already configured.
    """
    log_level = logging.DEBUG if debug else logging.INFO

    # Create logs directory
    log_dir = Path.home() / ".local" / "share" / "openhcs" / "logs"
    log_dir.mkdir(parents=True, exist_ok=True)

    # Create timestamped log file (one file per TUI session)
    import time
    log_file = log_dir / f"openhcs_unified_{time.strftime('%Y%m%d_%H%M%S')}.log"

    # Setup unified logging for entire OpenHCS system
    root_logger = logging.getLogger()

    # Clear any existing handlers to ensure clean state
    root_logger.handlers.clear()

    # Setup file-only logging (no console output for TUI)
    file_handler = logging.FileHandler(log_file)
    file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s'))

    root_logger.addHandler(file_handler)
    root_logger.setLevel(log_level)

    # Prevent other modules from adding console handlers.
    # HACK: monkeypatches the stdlib so later basicConfig() calls anywhere in
    # the process become no-ops; deliberate, but process-global and irreversible.
    logging.basicConfig = lambda *args, **kwargs: None

    # Set OpenHCS logger level for all components
    logging.getLogger("openhcs").setLevel(log_level)
    logger = logging.getLogger("openhcs.main")
    logger.info(f"OpenHCS unified logging started - Level: {logging.getLevelName(log_level)}")
    logger.info(f"Log file: {log_file}")
    return logger

122 

123 

def main():
    """Main entry point for OpenHCS Textual TUI.

    Dispatches to web-serving mode when ``--web`` was passed, otherwise
    runs the local TUI under a fresh asyncio event loop.
    """
    args = _parse_command_line_arguments()

    if args.web:
        # Web mode: hand off to textual-serve; no local TUI loop is started.
        _serve_web()
    else:
        # Local TUI mode runs the async entry point to completion.
        asyncio.run(main_async(args))

135 

136 

137def _setup_signal_handlers(): 

138 """Setup signal handlers for clean shutdown.""" 

139 import signal 

140 import threading 

141 import os 

142 

143 def force_cleanup(signum, frame): 

144 """Force cleanup all threads on signal.""" 

145 print("\nForcing immediate exit...") 

146 

147 # Try to cleanup background threads first 

148 try: 

149 active_threads = [t for t in threading.enumerate() if t != threading.current_thread() and t.is_alive()] 

150 if active_threads: 

151 print(f"Cleaning up {len(active_threads)} background threads...") 

152 # Can't set daemon on running threads, just note them and force exit 

153 except: 

154 pass 

155 

156 # Force immediate exit 

157 os._exit(0) 

158 

159 signal.signal(signal.SIGINT, force_cleanup) 

160 signal.signal(signal.SIGTERM, force_cleanup) 

161 

async def main_async(args) -> None:
    """Async main function for TUI mode.

    Startup order matters: signal handlers, then the multiprocessing start
    method (must precede any CUDA/worker initialization), then logging,
    then config/GPU setup, then the Textual app itself.

    Args:
        args: Parsed CLI namespace; only ``args.debug`` is read here.
    """

    # Setup signal handlers for clean shutdown
    _setup_signal_handlers()

    # Set multiprocessing start method FIRST, before any other initialization
    try:
        multiprocessing.set_start_method('spawn', force=True)
        print("Set multiprocessing start method to 'spawn' for CUDA compatibility")
    except RuntimeError:
        # Already set, check if it's spawn
        current_method = multiprocessing.get_start_method()
        if current_method != 'spawn':
            print(f"Warning: Multiprocessing start method is '{current_method}', not 'spawn'. CUDA may not work in worker processes.")
        else:
            print("Multiprocessing start method already set to 'spawn'")

    logger = _setup_logging(args.debug)

    try:
        # Load global configuration with cache support
        from openhcs.textual_tui.services.config_cache_adapter import load_cached_global_config_tui as load_cached_global_config
        global_config = await load_cached_global_config()
        logger.info("Global configuration loaded")

        # Setup GPU registry
        setup_global_gpu_registry(global_config=global_config)
        logger.info("GPU registry setup completed")

        # Create and run the Textual app
        app = OpenHCSTUIApp(global_config=global_config)
        logger.info("Starting OpenHCS Textual TUI application...")

        # Run the app with a timeout wrapper to prevent hanging.
        # NOTE(review): timeout=None means wait_for never times out, so the
        # TimeoutError branch below is currently unreachable; kept as a hook
        # for enabling a real timeout later — confirm intent.
        try:
            await asyncio.wait_for(app.run_async(), timeout=None)  # No timeout for normal operation
        except asyncio.TimeoutError:
            logger.warning("App run timed out, forcing exit")
            import os
            os._exit(0)

    except KeyboardInterrupt:
        logger.info("TUI terminated by user (Ctrl+C)")
    except Exception as e:
        logger.error(f"Unhandled error in TUI: {e}", exc_info=True)
        sys.exit(1)
    finally:
        logger.info("OpenHCS Textual TUI finished")

        # Final cleanup check - force exit if we're still hanging on
        # non-daemon background threads after the app has finished.
        try:
            import threading
            import time
            time.sleep(0.1)  # brief grace period for threads to wind down
            active_threads = [t for t in threading.enumerate() if t != threading.current_thread() and t.is_alive()]
            if active_threads:
                logger.warning(f"Final cleanup: {len(active_threads)} threads still active, forcing exit")
                import os
                os._exit(0)  # skip normal interpreter shutdown entirely
        except (ImportError, AttributeError, OSError) as cleanup_error:
            # If final cleanup fails, just log it - we're exiting anyway
            logger.debug(f"Final cleanup failed (non-critical): {cleanup_error}")

225 

226 

# Script entry: `python -m openhcs.textual_tui` lands here.
if __name__ == "__main__":
    main()