-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathserver.py
More file actions
117 lines (101 loc) · 3.77 KB
/
server.py
File metadata and controls
117 lines (101 loc) · 3.77 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
# 1. Chatbot Logic Imports
import nltk
from nltk.stem import WordNetLemmatizer
import numpy as np
import json
import random
import pickle
from tensorflow.keras.models import load_model
# 2. Flask Imports
from flask import Flask, render_template, request, jsonify
# --- Chatbot Initialization (Model Loading) ---
lemmatizer = WordNetLemmatizer()

# Load the intents corpus.
# Ensure 'intents.json' is in the same directory as server.py
try:
    with open('intents.json', encoding='utf-8') as file:
        data = json.load(file)
except FileNotFoundError:
    print("CRITICAL ERROR: intents.json not found.")
    # Bug fix: exit() is a site.py convenience and exits with status 0;
    # SystemExit(1) reliably terminates and reports failure to the shell.
    raise SystemExit(1)

try:
    # Load the vocabulary/classes produced by training and the trained model.
    # Ensure 'training_data.pkl' and 'chatbot_model.h5' are in the same directory
    with open('training_data.pkl', 'rb') as f:
        data_pkl = pickle.load(f)
    words = data_pkl['words']      # ordered vocabulary used for bag-of-words encoding
    classes = data_pkl['classes']  # intent tags, index-aligned with model output
    model = load_model('chatbot_model.h5')
    print("Chatbot Model and Data loaded for Web Server.")
except FileNotFoundError as e:
    print(f"CRITICAL ERROR: Could not load model/data: {e}. Make sure you ran python chatbot_model.py successfully.")
    raise SystemExit(1)
except Exception as e:
    print(f"CRITICAL ERROR during model loading: {e}")
    raise SystemExit(1)

# --- Flask App Setup ---
app = Flask(__name__)
# --- Chatbot Helper Functions ---
def clean_up_sentence(sentence):
    """Tokenize *sentence* and return the lowercase lemma of each token.

    If the required NLTK data packages are missing ('punkt' for the
    tokenizer, 'wordnet' for the lemmatizer), download them on the fly
    and retry the failed step once.
    """
    # Bug fix: nltk.word_tokenize raises LookupError when the 'punkt'
    # tokenizer data is absent — the original only guarded the
    # lemmatization step, so a missing 'punkt' crashed the request.
    try:
        sentence_words = nltk.word_tokenize(sentence)
    except LookupError:
        print("\nNLTK Data Missing! Running nltk.download('punkt') now...")
        nltk.download('punkt')
        sentence_words = nltk.word_tokenize(sentence)
    # Handle potential LookupError for 'wordnet' here if it happens again
    try:
        sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
    except LookupError:
        print("\nNLTK Data Missing! Running nltk.download('wordnet') now...")
        nltk.download('wordnet')
        sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
    return sentence_words
def bag_of_words(sentence, words):
    """Encode *sentence* as a binary NumPy vector over the vocabulary.

    Element i is 1 when words[i] appears among the sentence's
    tokenized/lemmatized tokens, otherwise 0.
    """
    tokens = clean_up_sentence(sentence)
    return np.array([1 if vocab_word in tokens else 0 for vocab_word in words])
def predict_class(sentence):
    """Classify *sentence* into intents above the confidence threshold.

    Returns a list of {"intent": tag, "probability": str} dicts sorted
    by probability (highest first). When no class clears the threshold,
    returns a single "no_match" entry so callers always get one intent.
    """
    ERROR_THRESHOLD = 0.75
    features = bag_of_words(sentence, words)
    # verbose=0 keeps Keras from printing a progress bar per request.
    probabilities = model.predict(np.array([features]), verbose=0)[0]
    candidates = [(idx, prob) for idx, prob in enumerate(probabilities) if prob > ERROR_THRESHOLD]
    candidates.sort(key=lambda pair: pair[1], reverse=True)
    predictions = [
        {"intent": classes[idx], "probability": str(prob)}
        for idx, prob in candidates
    ]
    # Default tag if no confidence is high enough.
    return predictions or [{"intent": "no_match", "probability": "1.0"}]
def get_response(intents_list, intents_json):
    """Pick a reply text for the top predicted intent.

    intents_list: output of predict_class, ordered best-first.
    intents_json: the parsed intents corpus, {"intents": [...]}.
    Returns a randomly chosen response for the matching tag, or a
    generic apology when the tag has no entry in the corpus
    (e.g. the synthetic "no_match" tag).
    """
    top_tag = intents_list[0]['intent']
    for intent in intents_json['intents']:
        if intent['tag'] == top_tag:
            return random.choice(intent['responses'])
    # No corpus entry for this tag — should be rare given predict_class.
    return "I'm sorry, I don't understand that."
# --- Web Routes ---
# 1. Main Chat Page
@app.route('/')
def home():
    """Serve the chat UI page (GET /)."""
    # Looks for index.html inside the 'templates' folder
    return render_template('index.html')
# 2. API Endpoint for Chatting
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn (POST /chat).

    Expects a JSON body {"message": "..."} and returns the bot's reply
    as JSON: {"response": text}.
    """
    # Bug fix: request.get_json() yields None (or aborts) when the body
    # is missing or not valid JSON, which crashed with AttributeError on
    # .get(). silent=True plus the `or {}` guard routes bad requests to
    # the friendly validation reply instead.
    payload = request.get_json(silent=True) or {}
    msg = payload.get("message")
    if not msg:
        return jsonify({"response": "Please send a valid message."})
    # Generate the chatbot's reply from the predicted intent.
    intents = predict_class(msg)
    response_text = get_response(intents, data)
    # Send the reply back to the web page as JSON.
    return jsonify({"response": response_text})
# --- Run Server ---
if __name__ == "__main__":
    # Start Flask's development server; debug=True enables auto-reload
    # and the interactive debugger (not for production use).
    # Ensure you use 'Ctrl + C' to stop this server
    app.run(debug=True)