11#!/usr/bin/env python3
22
3+ import sys
4+ import os
5+ import argparse
6+ import logging
37from flask import Flask , request
48from jinja2 import Environment , FileSystemLoader
59import ollama
610import markdown
711from pathlib import Path
812import json
9- import logging
10- import sys
11- import os
1213
__author__ = 'github@ryanhoke.net'
__license__ = 'GPL 3.0'

# Get version: the app version lives in a ".version" file next to this script.
app_root = os.path.dirname(os.path.abspath(__file__))
app_version = os.path.join(app_root, '.version')
with open(app_version, 'r') as f:
    __version__ = f.read().strip()

app = Flask(__name__)

# Default Ollama configuration from environment variable
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "localhost:11434")

# Chat history directory: one JSON file per model, stored in the user's home.
CHAT_DIR = Path.home() / ".flask_chat"
CHAT_DIR.mkdir(exist_ok=True)

# Set up Jinja2 environment (templates live beside this file)
template_dir = Path(__file__).parent / "templates"
jj_env = Environment(loader=FileSystemLoader(template_dir))
4334
def get_ollama_models(client, logger):
    """Return the names of models available from the Ollama server.

    Args:
        client: An ``ollama.Client`` (or compatible object) whose ``list()``
            returns a response with a ``models`` attribute.
        logger: Logger used for debug/diagnostic output.

    Returns:
        list[str]: Model names; on an empty/invalid listing or any exception,
        a single-element list containing a human-readable placeholder message.
    """
    try:
        models = client.list()
        logger.debug(f"Raw models response: {models}")
        model_list = models.models
        logger.debug(f"Extracted model list: {model_list}")
        if not model_list:
            logger.debug("Models list is empty")
            return ["No models available"]
        names = []
        for model in model_list:
            if hasattr(model, 'model'):
                names.append(model.model)
            else:
                logger.debug(f"Skipping invalid model entry: {model}")
        logger.debug(f"Final model names: {names}")
        return names if names else ["No valid models found"]
    except Exception as e:
        # Deliberate best-effort: the caller renders this message in the UI
        # rather than crashing the request.
        logger.debug(f"Exception in get_ollama_models: {str(e)}")
        return ["Error fetching models: " + str(e)]
6455
def save_chat_history(model, history, logger):
    """Persist *history* for *model* as JSON under CHAT_DIR.

    Colons in the model name (e.g. "llama3:8b") are replaced with
    underscores to produce a filesystem-safe filename.

    Args:
        model (str): Model name used to derive the history filename.
        history (list): JSON-serializable chat history entries.
        logger: Logger used for debug output.
    """
    history_file = CHAT_DIR / f"{model.replace(':', '_')}.json"
    with open(history_file, 'w') as f:
        json.dump(history, f)
    logger.debug(f"Saved chat history for model {model}: {history}")
7061
def load_chat_history(model, logger):
    """Load the saved chat history for *model* from CHAT_DIR.

    Mirrors the filename scheme used by save_chat_history (colons in the
    model name become underscores).

    Args:
        model (str): Model name whose history should be loaded.
        logger: Logger used for debug output.

    Returns:
        list: The saved history, or an empty list when no file exists.
    """
    history_file = CHAT_DIR / f"{model.replace(':', '_')}.json"
    if history_file.exists():
        with open(history_file, 'r') as f:
            history = json.load(f)
        logger.debug(f"Loaded chat history for model {model}: {history}")
        return history
    logger.debug(f"No chat history found for model {model}, returning empty list")
    return []
8071
@app.route('/', methods=['GET', 'POST'])
def chat():
    """Single-page chat view.

    GET renders the chat UI with the available models and saved history.
    POST with a 'prompt' field sends the prompt to the selected Ollama
    model, appends the markdown-rendered reply to the per-model history,
    and re-renders the page.
    """
    app.logger.debug(f"Received {request.method} request")

    # Get current configuration from form or use OLLAMA_HOST default
    host = request.form.get('host', OLLAMA_HOST)
    model = request.form.get('model', '')
    app.logger.debug(f"Configuration - host: {host}, model: {model}")

    # Configure ollama client with error handling
    host_url = f"http://{host}" if not host.startswith("http://") else host
    try:
        client = ollama.Client(host=host_url)
        app.logger.debug(f"Ollama client configured for {host_url}")
    except Exception as e:
        app.logger.error(f"Failed to initialize Ollama client: {str(e)}")
        models = ["Error: Could not connect to Ollama"]
        model = "No models available"
        chat_history = []
        app.logger.debug("Rendering template with error state due to client failure")
        jj_template = jj_env.get_template('chat.html')
        # NOTE(review): the middle of this render call was elided in the diff
        # hunk; arguments restored to match the success-path render below —
        # confirm against the original file.
        return jj_template.render(
            host=host,
            models=models,
            selected_model=model,
            chat_history=chat_history
        )

    # Get available models
    models = get_ollama_models(client, app.logger)
    if not model and models and "Error" not in models[0]:
        model = models[0]
        app.logger.debug(f"No model selected, using default: {model}")
    elif not model:
        model = "No models available"
        app.logger.debug("No valid models found, setting model to 'No models available'")

    # Load chat history for selected model (always defined)
    chat_history = load_chat_history(model, app.logger) if model != "No models available" else []
    app.logger.debug(f"Initial chat history for {model}: {chat_history}")

    if request.method == 'POST' and 'prompt' in request.form:
        prompt = request.form['prompt'].strip()
        app.logger.debug(f"Received prompt: '{prompt}'")
        if prompt:  # Only process non-empty prompts
            try:
                if "Error" not in model and model != "No models available":
                    app.logger.debug(f"Sending prompt to Ollama: model={model}, prompt='{prompt}'")
                    response = client.generate(model=model, prompt=prompt)
                    app.logger.debug(f"Full response from Ollama: {response}")
                    reply = response.get('response', 'No response field in reply')
                    app.logger.debug(f"Extracted reply: '{reply}'")
                    # Render the model's markdown reply to HTML for the template.
                    formatted_reply = markdown.markdown(reply)
                    app.logger.debug(f"Formatted reply: '{formatted_reply}'")
                    chat_history.append({"user": prompt, "bot": formatted_reply})
                    app.logger.debug(f"Updated chat history: {chat_history}")
                    save_chat_history(model, chat_history, app.logger)
                else:
                    chat_history.append({"user": prompt, "bot": "Cannot generate: No valid model selected"})
                    app.logger.debug("No valid model, added error message to history")
            except Exception as e:
                app.logger.error(f"Error during generation: {str(e)}")
                chat_history.append({"user": prompt, "bot": f"Error: {str(e)}"})
                save_chat_history(model, chat_history, app.logger)
        else:
            app.logger.debug("Empty prompt, reloading chat history for selected model")
            # No prompt, just reload history for the selected model

    # Log all variables before rendering
    app.logger.debug("Rendering template with variables:")
    app.logger.debug(f"host: {host}")
    app.logger.debug(f"models: {models}")
    app.logger.debug(f"selected_model: {model}")
    app.logger.debug(f"chat_history: {chat_history}")

    # Load and render the template explicitly
    try:
        app.logger.debug("Loading chat.html from templates directory")
        jj_template = jj_env.get_template('chat.html')
        app.logger.debug("Rendering chat.html")
        rendered_html = jj_template.render(
            host=host,
            models=models,
            selected_model=model,
            chat_history=chat_history
        )
        app.logger.debug("chat.html rendered successfully")
        return rendered_html
    except Exception as e:
        app.logger.error(f"Failed to render chat.html: {str(e)}")
        return f"Error rendering template: {str(e)}", 500
172163
def main(args):
    """Start the Flask development server.

    Args:
        args: Parsed argparse namespace; only ``args.port`` is used here.
    """
    app.logger.debug("Starting Flask application")
    # NOTE(review): debug=True on host 0.0.0.0 exposes the Werkzeug debugger
    # to the network — fine for local development, unsafe in production.
    app.run(debug=True, host='0.0.0.0', port=args.port)
167+
if __name__ == "__main__":
    """ This is executed when run from the command line """

    description = """
    A simple Flask-based chat application interfacing with Ollama.
    """

    epilog = """
    GPL 3.0 License.
    """

    parser = argparse.ArgumentParser(description=description,
                                     epilog=epilog,
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # Optional verbosity counter (eg. -v, -vv, -vvv, etc.)
    parser.add_argument('-v', '--verbose',
                        action="count",
                        default=0,
                        help="Verbosity (-v, -vv, etc)")

    # Optional port argument
    parser.add_argument('-p', '--port',
                        action="store",
                        type=int,
                        default=5000,
                        help="Port to run the Flask app on")

    args = parser.parse_args()

    # Set up logging: a single stdout handler shared by the root logger
    # and Flask's app.logger, with a more detailed format when verbose.
    l = logging.getLogger()  # Root logger per your convention
    fmtdebug = logging.Formatter('%(levelname)s: [%(funcName)s():%(lineno)i] %(message)s')
    fmtinfo = logging.Formatter('%(levelname)s: %(message)s')
    handler = logging.StreamHandler(sys.stdout)

    if args.verbose > 0:
        handler.setFormatter(fmtdebug)
        l.addHandler(handler)
        l.setLevel(logging.DEBUG)
        print(f'verbose = {args.verbose}')
    else:
        handler.setFormatter(fmtinfo)
        l.addHandler(handler)
        l.setLevel(logging.INFO)

    # Configure Flask app logger to use the same setup
    app.logger.handlers = []
    app.logger.addHandler(handler)
    app.logger.setLevel(l.level)

    main(args)
0 commit comments