1+ from argparse import Namespace
2+ from ..models import ModelLoader , QuantConfig
3+ from ..data import DatasetLoader
4+ from ..training import FineTuningTrainer , ModelEvaluator
5+ from ..config import ModelConfig , TrainingConfig , DatasetConfig
6+ from ..runtime import DeviceManager
7+ from ..utils .monitoring import TrainingLogger
8+
def train(args: Namespace):
    """Run the fine-tuning pipeline for the model/dataset named in *args*.

    Builds the model, dataset and training configs from the CLI arguments,
    loads the model/tokenizer pair and the dataset, then hands everything
    to FineTuningTrainer. Failures are logged and re-raised so the CLI
    entry point can surface them.
    """
    logger = TrainingLogger()
    device_manager = DeviceManager()

    try:
        # Translate CLI flags into typed configuration objects.
        quant = args.quantization
        model_config = ModelConfig(
            model_name=args.model,
            load_in_4bit=(quant == "4bit"),
            load_in_8bit=(quant == "8bit"),
            use_lora=args.use_lora,
        )
        dataset_config = DatasetConfig(dataset_name=args.dataset)
        training_config = TrainingConfig(output_dir=args.output_dir)

        # Materialize the model/tokenizer and the training dataset.
        model, tokenizer = ModelLoader(model_config).load()
        dataset = DatasetLoader(dataset_config).load()

        # Wire everything into the trainer and run the training loop.
        trainer = FineTuningTrainer(
            model=model,
            tokenizer=tokenizer,
            dataset=dataset,
            config=training_config,
            device_manager=device_manager,
        )
        trainer.train()

    except Exception as e:
        logger.error(f"Training failed: {str(e)}")
        raise
49+
def evaluate(args: Namespace):
    """Evaluate a model on a dataset and optionally persist the results.

    Loads the model/tokenizer and dataset named in *args*, runs
    ModelEvaluator over the dataset, and writes the results to
    ``args.output_file`` when one is given. Failures are logged and
    re-raised for the CLI entry point to handle.
    """
    logger = TrainingLogger()
    device_manager = DeviceManager()

    try:
        # Build configs, then load the model/tokenizer and the dataset.
        model_config = ModelConfig(model_name=args.model)
        dataset_config = DatasetConfig(dataset_name=args.dataset)

        model, tokenizer = ModelLoader(model_config).load()
        dataset = DatasetLoader(dataset_config).load()

        # Run the evaluation pass over the loaded dataset.
        evaluator = ModelEvaluator(
            model=model,
            tokenizer=tokenizer,
            device_manager=device_manager,
        )
        results = evaluator.evaluate(dataset)

        # Persist results only when a destination was requested.
        if args.output_file:
            evaluator.save_results(results, args.output_file)

    except Exception as e:
        logger.error(f"Evaluation failed: {str(e)}")
        raise
83+
def quantize(args: Namespace):
    """Load a model with 4- or 8-bit quantization and save it to disk.

    Args:
        args: Parsed CLI namespace. Uses ``model`` (model name or path),
            ``bits`` (must be 4 or 8) and ``output_dir`` (destination
            directory for the quantized model and its tokenizer).

    Raises:
        ValueError: If ``args.bits`` is not 4 or 8. Previously any other
            value left both quantization flags False, silently saving a
            full-precision copy while reporting success.
        Exception: Any load/save failure is logged and re-raised.
    """
    # Fail fast on unsupported bit widths instead of silently writing an
    # unquantized copy of the model to the output directory.
    if args.bits not in (4, 8):
        raise ValueError(
            f"Unsupported quantization bits: {args.bits} (expected 4 or 8)"
        )

    logger = TrainingLogger()

    try:
        # Map the requested bit width onto the loader's quantization flags;
        # loading with these flags set is what applies the quantization.
        model_config = ModelConfig(
            model_name=args.model,
            load_in_4bit=args.bits == 4,
            load_in_8bit=args.bits == 8,
        )
        model, tokenizer = ModelLoader(model_config).load()

        # Persist the quantized weights together with a matching tokenizer.
        model.save_pretrained(args.output_dir)
        tokenizer.save_pretrained(args.output_dir)

    except Exception as e:
        logger.error(f"Quantization failed: {str(e)}")
        raise
107+
def serve(args: Namespace):
    """Load a model and expose it over the network via ModelServer.

    Loads the model named in *args*, then constructs and starts a
    ModelServer bound to ``args.host``/``args.port``. Startup failures
    are logged and re-raised for the CLI entry point to handle.
    """
    logger = TrainingLogger()
    device_manager = DeviceManager()

    try:
        # Imported lazily so serving-only dependencies are required
        # just for this command.
        from ..serving import ModelServer

        # Load the model/tokenizer to be served.
        model_config = ModelConfig(model_name=args.model)
        model, tokenizer = ModelLoader(model_config).load()

        # Build the server and hand over control.
        server = ModelServer(
            model=model,
            tokenizer=tokenizer,
            device_manager=device_manager,
            host=args.host,
            port=args.port,
        )
        server.start()

    except Exception as e:
        logger.error(f"Server startup failed: {str(e)}")
        raise