Coverage for openhcs/textual_tui/__main__.py: 0.0%

117 statements  

« prev     ^ index     » next       coverage.py v7.11.0, created at 2025-11-04 02:09 +0000

1""" 

2OpenHCS Textual TUI Entry Point 

3 

4Entry point for the OpenHCS Textual TUI application. 

5Replicates the existing pattern from the prompt-toolkit TUI. 

6""" 

7 

8import argparse 

9import asyncio 

10import logging 

11import multiprocessing 

12import sys 

13from pathlib import Path 

14 

15 

16from openhcs.core.orchestrator.gpu_scheduler import setup_global_gpu_registry 

17 

18from .app import OpenHCSTUIApp 

19 

20 

21def _parse_command_line_arguments(): 

22 """Parse command line arguments.""" 

23 parser = argparse.ArgumentParser( 

24 description="OpenHCS Textual TUI - Modern Terminal User Interface" 

25 ) 

26 

27 parser.add_argument( 

28 "--debug", 

29 action="store_true", 

30 help="Enable debug logging" 

31 ) 

32 

33 parser.add_argument( 

34 "--workspace", 

35 type=str, 

36 default=None, 

37 help="Workspace directory for outputs (optional)" 

38 ) 

39 

40 parser.add_argument( 

41 "--web", 

42 action="store_true", 

43 help="Serve the TUI via textual-web instead of running locally" 

44 ) 

45 

46 return parser.parse_args() 

47 

48 

def _serve_web():
    """Serve the TUI in a browser via the textual-serve package.

    Exits the process with status 1 when textual-serve is not installed
    or the server fails for any reason other than Ctrl+C.
    """
    try:
        # Availability probe only — the server import happens below.
        import textual_serve  # noqa: F401
        print("✅ textual-serve found")
    except ImportError:
        print("❌ textual-serve is not installed!")
        print("📦 Install it with: pip install textual-serve")
        sys.exit(1)

    try:
        banner = (
            "🌐 Starting OpenHCS web server...",
            "🔗 Your TUI will be available at: http://localhost:8000",
            "📝 Share this URL to give others access to your OpenHCS TUI",
            "⚠️ Note: The TUI runs on YOUR machine, others just see it in their browser",
            "",
        )
        for line in banner:
            print(line)

        # textual-serve runs the TUI as a subprocess and streams it to browsers.
        from textual_serve.server import Server

        Server(
            command="python -m openhcs.textual_tui",
            host="localhost",
            port=8000,
            title="OpenHCS - High-Content Screening Platform",
        ).serve()

    except KeyboardInterrupt:
        print("\n🛑 Web server stopped by user")
    except Exception as e:
        print(f"❌ Error running textual-serve: {e}")
        sys.exit(1)

84 

85 

86def _setup_logging(debug: bool = False): 

87 """Setup unified logging configuration for entire OpenHCS system.""" 

88 log_level = logging.DEBUG if debug else logging.INFO 

89 

90 # Create logs directory 

91 log_dir = Path.home() / ".local" / "share" / "openhcs" / "logs" 

92 log_dir.mkdir(parents=True, exist_ok=True) 

93 

94 # Create timestamped log file 

95 import time 

96 log_file = log_dir / f"openhcs_unified_{time.strftime('%Y%m%d_%H%M%S')}.log" 

97 

98 # Setup unified logging for entire OpenHCS system 

99 root_logger = logging.getLogger() 

100 

101 # Clear any existing handlers to ensure clean state 

102 root_logger.handlers.clear() 

103 

104 # Setup file-only logging (no console output for TUI) 

105 file_handler = logging.FileHandler(log_file) 

106 file_handler.setFormatter(logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) 

107 

108 root_logger.addHandler(file_handler) 

109 root_logger.setLevel(log_level) 

110 

111 # Prevent other modules from adding console handlers 

112 logging.basicConfig = lambda *args, **kwargs: None 

113 

114 # Set OpenHCS logger level for all components 

115 logging.getLogger("openhcs").setLevel(log_level) 

116 logger = logging.getLogger("openhcs.main") 

117 logger.info(f"OpenHCS unified logging started - Level: {logging.getLevelName(log_level)}") 

118 logger.info(f"Log file: {log_file}") 

119 return logger 

120 

121 

def main():
    """Main entry point for OpenHCS Textual TUI."""
    args = _parse_command_line_arguments()
    if args.web:
        # --web hands off to textual-serve; no local event loop is started.
        _serve_web()
    else:
        # Normal TUI mode: drive the async app under asyncio.
        asyncio.run(main_async(args))

133 

134 

135def _setup_signal_handlers(): 

136 """Setup signal handlers for clean shutdown.""" 

137 import signal 

138 import threading 

139 import os 

140 

141 def force_cleanup(signum, frame): 

142 """Force cleanup all threads on signal.""" 

143 print("\nForcing immediate exit...") 

144 

145 # Try to cleanup background threads first 

146 try: 

147 active_threads = [t for t in threading.enumerate() if t != threading.current_thread() and t.is_alive()] 

148 if active_threads: 

149 print(f"Cleaning up {len(active_threads)} background threads...") 

150 # Can't set daemon on running threads, just note them and force exit 

151 except: 

152 pass 

153 

154 # Force immediate exit 

155 os._exit(0) 

156 

157 signal.signal(signal.SIGINT, force_cleanup) 

158 signal.signal(signal.SIGTERM, force_cleanup) 

159 

async def main_async(args):
    """Async main function for TUI mode.

    Order matters: signal handlers and the multiprocessing start method are
    configured before logging/config/GPU setup, so any spawned worker
    inherits the 'spawn' method (required for CUDA).
    """

    # Setup signal handlers for clean shutdown
    _setup_signal_handlers()

    # Set multiprocessing start method FIRST, before any other initialization
    try:
        multiprocessing.set_start_method('spawn', force=True)
        print("Set multiprocessing start method to 'spawn' for CUDA compatibility")
    except RuntimeError:
        # Already set, check if it's spawn
        current_method = multiprocessing.get_start_method()
        if current_method != 'spawn':
            print(f"Warning: Multiprocessing start method is '{current_method}', not 'spawn'. CUDA may not work in worker processes.")
        else:
            print("Multiprocessing start method already set to 'spawn'")

    logger = _setup_logging(args.debug)

    try:
        # Load global configuration with cache support
        from openhcs.textual_tui.services.config_cache_adapter import load_cached_global_config_tui as load_cached_global_config
        global_config = await load_cached_global_config()
        logger.info("Global configuration loaded")

        # Setup GPU registry
        setup_global_gpu_registry(global_config=global_config)
        logger.info("GPU registry setup completed")

        # Create and run the Textual app
        app = OpenHCSTUIApp(global_config=global_config)
        logger.info("Starting OpenHCS Textual TUI application...")

        # Run the app with a timeout wrapper to prevent hanging
        # NOTE(review): timeout=None means wait_for never times out, so the
        # TimeoutError branch below is dead code — confirm before removing.
        try:
            await asyncio.wait_for(app.run_async(), timeout=None)  # No timeout for normal operation
        except asyncio.TimeoutError:
            logger.warning("App run timed out, forcing exit")
            import os
            os._exit(0)

    except KeyboardInterrupt:
        logger.info("TUI terminated by user (Ctrl+C)")
    except Exception as e:
        logger.error(f"Unhandled error in TUI: {e}", exc_info=True)
        sys.exit(1)
    finally:
        logger.info("OpenHCS Textual TUI finished")

        # Final cleanup check - force exit if we're still hanging
        try:
            import threading
            import time
            # Brief grace period before inspecting leftover threads
            time.sleep(0.1)
            active_threads = [t for t in threading.enumerate() if t != threading.current_thread() and t.is_alive()]
            if active_threads:
                logger.warning(f"Final cleanup: {len(active_threads)} threads still active, forcing exit")
                import os
                os._exit(0)
        except (ImportError, AttributeError, OSError) as cleanup_error:
            # If final cleanup fails, just log it - we're exiting anyway
            logger.debug(f"Final cleanup failed (non-critical): {cleanup_error}")

223 

224 

# Script entry point: allows `python -m openhcs.textual_tui` and direct execution.
if __name__ == "__main__":
    main()