4 files changed: +117 −0

@@ -11,6 +11,7 @@ dependencies = [
     "httpx>=0.28.1",
     "langchain-azure-ai>=0.1.4",
     "langchain-community>=0.3.27",
+    "langchain-ollama>=0.3.6",
     "langchain-openai>=0.3.28",
     "langchain-text-splitters>=0.3.9",
     "langgraph>=0.6.2",
@@ -0,0 +1,57 @@
+import logging
+
+import typer
+from dotenv import load_dotenv
+
+from template_langgraph.llms.ollamas import OllamaWrapper
+from template_langgraph.loggers import get_logger
+
+# Initialize the Typer application
+app = typer.Typer(
+    add_completion=False,
+    help="Ollama operator CLI",
+)
+
+# Set up logging
+logger = get_logger(__name__)
+
+
+@app.command()
+def run(
+    query: str = typer.Option(
+        "What is the weather like today?",
+        "--query",
+        "-q",
+        help="Query to run against the Ollama model",
+    ),
+    verbose: bool = typer.Option(
+        False,
+        "--verbose",
+        "-v",
+        help="Enable verbose output",
+    ),
+):
+    # Set up logging
+    if verbose:
+        logger.setLevel(logging.DEBUG)
+
+    logger.info("Running...")
+    chat_model = OllamaWrapper().chat_model
+    response = chat_model.invoke(
+        input=query,
+    )
+    logger.debug(
+        response.model_dump_json(
+            indent=2,
+            exclude_none=True,
+        )
+    )
+    logger.info(f"Output: {response.content}")
+
+
+if __name__ == "__main__":
+    load_dotenv(
+        override=True,
+        verbose=True,
+    )
+    app()
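
Because the Typer app registers only a single command, Typer runs it directly without requiring the run subcommand name; an invocation would look like python <path-to-this-file> --query "What is LangGraph?" --verbose (the file's path is not shown in this capture).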
@@ -0,0 +1,31 @@
+from functools import lru_cache
+
+from langchain_ollama import ChatOllama
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+    ollama_model_chat: str = "phi3:latest"
+
+    model_config = SettingsConfigDict(
+        env_file=".env",
+        env_ignore_empty=True,
+        extra="ignore",
+    )
+
+
+@lru_cache
+def get_ollama_settings() -> Settings:
+    return Settings()
+
+
+class OllamaWrapper:
+    def __init__(self, settings: Settings = None):
+        if settings is None:
+            settings = get_ollama_settings()
+
+        self.chat_model = ChatOllama(
+            model=settings.ollama_model_chat,
+            temperature=0.0,
+            streaming=True,
+        )
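
Settings reads ollama_model_chat from the environment or a .env file (pydantic-settings matches field names to environment variables case-insensitively), and get_ollama_settings caches the first result via lru_cache. A usage sketch, assuming the package is importable and a local Ollama server is running:

```python
import os

# Override the default model; this must happen before the first (cached)
# call to get_ollama_settings(), since lru_cache freezes the result.
os.environ["OLLAMA_MODEL_CHAT"] = "phi3:latest"

from template_langgraph.llms.ollamas import OllamaWrapper, get_ollama_settings

print(get_ollama_settings().ollama_model_chat)  # -> "phi3:latest"
chat_model = OllamaWrapper().chat_model
print(chat_model.invoke("Reply with one word: pong").content)
```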