1818 AzureChatOpenAI ,
1919 OpenAIEmbeddings ,
2020)
21- from langchain_community .llms .bedrock import Bedrock
2221
# Load environment variables from a .env file in the current working directory.
# override=True lets the file take precedence over already-set process env vars.
env_path = os.path.join(os.getcwd(), ".env")

if not os.path.exists(env_path):
    print(f"⚠️ 환경변수 파일(.env)이 { os .getcwd ()} 에 없습니다!")
else:
    load_dotenv(env_path, override=True)
    print(f"✅ 환경변수 파일(.env)이 { os .getcwd ()} 에 로드되었습니다!")
def get_llm(**kwargs) -> BaseLanguageModel:
    """Return a chat model interface for the provider named in LLM_PROVIDER.

    Args:
        **kwargs: Extra keyword arguments forwarded to the provider-specific
            factory (and ultimately to the underlying chat model constructor).

    Returns:
        A configured chat model implementing BaseLanguageModel.

    Raises:
        ValueError: If LLM_PROVIDER is unset or names an unknown provider.
    """
    provider = os.getenv("LLM_PROVIDER")
    # NOTE: the previous debug `print(os.environ["LLM_PROVIDER"])` raised
    # KeyError before the None check below could run (and leaked config to
    # stdout), so it has been removed.

    if provider is None:
        raise ValueError("LLM_PROVIDER environment variable is not set.")

    # Dispatch table instead of an if/elif ladder; adding a provider is a
    # one-line change.
    factories = {
        "openai": get_llm_openai,
        "azure": get_llm_azure,
        "bedrock": get_llm_bedrock,
        "gemini": get_llm_gemini,
        "ollama": get_llm_ollama,
        "huggingface": get_llm_huggingface,
    }

    try:
        factory = factories[provider]
    except KeyError:
        raise ValueError(f"Invalid LLM API Provider: { provider } ") from None

    return factory(**kwargs)
5660
5761
def get_llm_openai(**kwargs) -> BaseLanguageModel:
    """Build a ChatOpenAI client from OPEN_AI_* environment variables.

    Falls back to the "gpt-4o" model when OPEN_AI_LLM_MODEL is unset; extra
    keyword arguments are forwarded to the ChatOpenAI constructor.
    """
    model_name = os.getenv("OPEN_AI_LLM_MODEL", "gpt-4o")
    api_key = os.getenv("OPEN_AI_KEY")
    return ChatOpenAI(model=model_name, api_key=api_key, **kwargs)
6368
6469
def get_llm_azure(**kwargs) -> BaseLanguageModel:
    """Build an AzureChatOpenAI client from AZURE_OPENAI_LLM_* env variables.

    Extra keyword arguments are forwarded to the AzureChatOpenAI constructor.
    """
    # AZURE_OPENAI_LLM_MODEL holds the Azure *deployment* name, not a raw
    # model id.
    deployment = os.getenv("AZURE_OPENAI_LLM_MODEL")
    return AzureChatOpenAI(
        api_key=os.getenv("AZURE_OPENAI_LLM_KEY"),
        azure_endpoint=os.getenv("AZURE_OPENAI_LLM_ENDPOINT"),
        azure_deployment=deployment,
        api_version=os.getenv("AZURE_OPENAI_LLM_API_VERSION", "2023-07-01-preview"),
        **kwargs,
    )
7278
7379
def get_llm_bedrock(**kwargs) -> BaseLanguageModel:
    """Build a ChatBedrockConverse client from AWS_BEDROCK_LLM_* env variables.

    Region defaults to "us-east-1"; extra keyword arguments are forwarded to
    the ChatBedrockConverse constructor.
    """
    region = os.getenv("AWS_BEDROCK_LLM_REGION", "us-east-1")
    return ChatBedrockConverse(
        model=os.getenv("AWS_BEDROCK_LLM_MODEL"),
        aws_access_key_id=os.getenv("AWS_BEDROCK_LLM_ACCESS_KEY_ID"),
        aws_secret_access_key=os.getenv("AWS_BEDROCK_LLM_SECRET_ACCESS_KEY"),
        region_name=region,
        **kwargs,
    )
8188
8289
def get_llm_gemini(**kwargs) -> BaseLanguageModel:
    """Build a ChatGoogleGenerativeAI client from the GEMINI_LLM_MODEL env var.

    Extra keyword arguments are forwarded to the constructor.
    """
    model_name = os.getenv("GEMINI_LLM_MODEL")
    return ChatGoogleGenerativeAI(model=model_name, **kwargs)
8592
8693
def get_llm_ollama(**kwargs) -> BaseLanguageModel:
    """Build a ChatOllama client from OLLAMA_LLM_* environment variables.

    base_url is only passed when OLLAMA_LLM_BASE_URL is set and non-empty, so
    the client's own default endpoint applies otherwise. Extra keyword
    arguments are forwarded to the constructor.
    """
    model_name = os.getenv("OLLAMA_LLM_MODEL")
    endpoint = os.getenv("OLLAMA_LLM_BASE_URL")
    if endpoint:
        return ChatOllama(base_url=endpoint, model=model_name, **kwargs)
    return ChatOllama(model=model_name, **kwargs)
93100
94101
def get_llm_huggingface(**kwargs) -> BaseLanguageModel:
    """Build a ChatHuggingFace model backed by a HuggingFaceEndpoint.

    Configuration comes from the HUGGING_FACE_LLM_* environment variables.
    NOTE: extra keyword arguments are forwarded to the underlying
    HuggingFaceEndpoint, not to ChatHuggingFace itself.
    """
    endpoint = HuggingFaceEndpoint(
        model=os.getenv("HUGGING_FACE_LLM_MODEL"),
        repo_id=os.getenv("HUGGING_FACE_LLM_REPO_ID"),
        task="text-generation",
        endpoint_url=os.getenv("HUGGING_FACE_LLM_ENDPOINT"),
        huggingfacehub_api_token=os.getenv("HUGGING_FACE_LLM_API_TOKEN"),
        **kwargs,
    )
    return ChatHuggingFace(llm=endpoint)
105113
@@ -109,6 +117,7 @@ def get_embeddings() -> Optional[BaseLanguageModel]:
109117 return embedding model interface
110118 """
111119 provider = os .getenv ("EMBEDDING_PROVIDER" )
120+ print (provider )
112121
113122 if provider is None :
114123 raise ValueError ("EMBEDDING_PROVIDER environment variable is not set." )
@@ -135,7 +144,7 @@ def get_embeddings() -> Optional[BaseLanguageModel]:
def get_embeddings_openai() -> BaseLanguageModel:
    """Build an OpenAIEmbeddings client from environment variables.

    Reuses OPEN_AI_KEY (the same key as the OpenAI chat model) and reads the
    embedding model name from OPEN_AI_EMBEDDING_MODEL.
    """
    model_name = os.getenv("OPEN_AI_EMBEDDING_MODEL")
    api_key = os.getenv("OPEN_AI_KEY")
    return OpenAIEmbeddings(model=model_name, openai_api_key=api_key)
140149
141150
0 commit comments