 14 | 14 |                                              SelfDebugLLM, VisualLLM)
 15 | 15 | from amadeusgpt.integration_module_hub import IntegrationModuleHub
 16 | 16 |
 17 |    | -amadeus_fac = {}
 18 |    | -
 19 |    | -
 20 |    | -# using the config file to cache the amadeus instance
 21 |    | -# not sure if this is the best practice
 22 |    | -def create_amadeus(config: Config):
 23 |    | -    if str(config) not in amadeus_fac:
 24 |    | -        amadeus_fac[str(config)] = AMADEUS(config)
 25 |    | -    return amadeus_fac[str(config)]
 26 |    | -
 27 | 17 | from amadeusgpt.analysis_objects.llm import (CodeGenerationLLM, DiagnosisLLM,
 28 | 18 |                                              SelfDebugLLM, VisualLLM)
 29 | 19 | from amadeusgpt.integration_module_hub import IntegrationModuleHub
 30 | 20 |
 31 |    | -amadeus_fac = {}
 32 |    | -
 33 |    | -
 34 |    | -# using the config file to cache the amadeus instance
 35 |    | -# not sure if this is the best practice
 36 |    | -def create_amadeus(config: Config):
 37 |    | -    if str(config) not in amadeus_fac:
 38 |    | -        amadeus_fac[str(config)] = AMADEUS(config)
 39 |    | -    return amadeus_fac[str(config)]
 40 |    | -
 41 |    | -
 42 | 21 | class AMADEUS:
 43 | 22 |     def __init__(self, config: Dict[str, Any]):
 44 | 23 |         self.config = config
@@ -107,11 +86,11 @@ def run_task_program(self, task_program_name: str): |
107 | 86 | if __name__ == "__main__":
108 | 87 |     from amadeusgpt.analysis_objects.llm import VisualLLM
109 | 88 |     from amadeusgpt.config import Config
110 |    | -    from amadeusgpt.main import create_amadeus
    | 89 | +
111 | 90 |
112 | 91 |     config = Config("amadeusgpt/configs/EPM_template.yaml")
113 | 92 |
114 |    | -    amadeus = create_amadeus(config)
    | 93 | +    amadeus = AMADEUS(config)
115 | 94 |     sandbox = amadeus.sandbox
116 | 95 |     visualLLm = VisualLLM(config)
117 | 96 |     visualLLm.speak(sandbox)
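
Net effect of the diff: the module-level amadeus_fac cache and the create_amadeus() factory are removed, and callers now construct AMADEUS(config) directly, as the updated __main__ block shows. For downstream code that relied on the factory's memoisation (one AMADEUS instance per distinct config), the same behaviour can be recreated on the caller side. The sketch below is a minimal illustration, assuming AMADEUS is importable from amadeusgpt.main and that str(config) is still a stable cache key (the key the removed factory used); get_amadeus is a hypothetical helper, not part of the package.

from amadeusgpt.config import Config
from amadeusgpt.main import AMADEUS  # assumed import path: the class is defined in this module

# Hypothetical caller-side replacement for the removed create_amadeus() factory.
# Instances are memoised by str(config), mirroring the deleted module-level dict.
_amadeus_cache: dict = {}

def get_amadeus(config: Config) -> AMADEUS:
    key = str(config)
    if key not in _amadeus_cache:
        _amadeus_cache[key] = AMADEUS(config)
    return _amadeus_cache[key]

config = Config("amadeusgpt/configs/EPM_template.yaml")
amadeus = get_amadeus(config)
assert get_amadeus(config) is amadeus  # repeated calls with the same config reuse the instance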