Skip to content

Commit 5f3e70f

Browse files
committed
FastAPI setup and Docker configuration
1 parent 9a9f92f commit 5f3e70f

File tree

5 files changed

+51
-49
lines changed

5 files changed

+51
-49
lines changed

Dockerfile

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,14 +11,14 @@ COPY requirements.txt /app/
1111
RUN pip install --no-cache-dir --upgrade pip \
1212
&& pip install --no-cache-dir -r requirements.txt
1313

14-
RUN pip install --user -U nltk
14+
RUN pip install -U nltk
1515

1616
# Then copy the rest of the application files
1717
COPY . /app
1818

19-
# Expose the port Flask runs on
20-
EXPOSE 5000
19+
# Expose the port FastAPI runs on
20+
EXPOSE 8000
2121

2222
# Define the entry point command
23-
CMD ["python", "app/main.py"]
23+
CMD ["uvicorn", "main:app", "--reload"]
2424

app/classifier_model.py

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,18 +7,22 @@
77
import nltk
88
from nltk.corpus import stopwords
99
import gdown
10+
import os
11+
12+
file_path = "bert_model.pth" # Change to your file's actual path
1013

1114
def download_model(file_id, output_name):
12-
url = f"https://drive.google.com/uc?id={file_id}"
13-
gdown.download(url, output_name, quiet=False)
15+
url = f"https://drive.google.com/uc?id={file_id}"
16+
gdown.download(url, output_name, quiet=False)
1417

15-
# Model file IDs from Google Drive
16-
bert_model_id = "14OBJIgUtGLujlCzEaBb2Mxc5eUsAMZk5"
17-
cascade_bert_model_id = "14Shk7Yt6ilSrzFvppjSsqqZBv2RM1qwt"
18+
if not os.path.exists(file_path):
19+
# Model file IDs from Google Drive
20+
bert_model_id = "14OBJIgUtGLujlCzEaBb2Mxc5eUsAMZk5"
21+
cascade_bert_model_id = "14Shk7Yt6ilSrzFvppjSsqqZBv2RM1qwt"
1822

19-
# Download models
20-
download_model(bert_model_id, "bert_model.pth")
21-
download_model(cascade_bert_model_id, "cascade_bert_model.pth")
23+
# Download models
24+
download_model(bert_model_id, "bert_model.pth")
25+
download_model(cascade_bert_model_id, "cascade_bert_model.pth")
2226

2327

2428
nltk.download('stopwords')
@@ -102,10 +106,4 @@ def classify_text(text, model_ai_hum, model_llm, tokenizer):
102106
else:
103107
predicted_label_llm = "Gemini"
104108

105-
return {"type": predicted_label, "llm": predicted_label_llm}
106-
107-
108-
# Example usage:
109-
# abstract = df["Abstract"].values[39000]
110-
# result = classify_abstract(abstract, model_ai_hum, model_llm, tokenizer)
111-
# print(result)
109+
return {"type": predicted_label, "llm": predicted_label_llm}

app/main.py

Lines changed: 28 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,31 +1,33 @@
1-
print("svc_texpose - starting")
2-
1+
from fastapi import FastAPI, Request, Form
2+
from fastapi.templating import Jinja2Templates
3+
from fastapi.responses import HTMLResponse
34
from classifier_model import load_models, classify_text
4-
from flask import Flask, render_template, request
55

6+
print("svc_texpose - starting")
7+
8+
# Load models
69
tokenizer, model_ai_hum, model_llm = load_models()
710

8-
# Flask App
9-
app = Flask(__name__)
10-
11-
@app.route("/", methods=["GET", "POST"])
12-
def index():
13-
prediction = None
14-
if request.method == "GET":
15-
return {"svc_Texpose is alive :) "}
16-
17-
if request.method == "POST":
18-
input_text = request.form.get("text", "")
19-
if input_text.strip(): # input if not empty then proceed
20-
result = classify_text(input_text, model_ai_hum, model_llm, tokenizer)
21-
prediction = result["type"]
22-
if len(result["llm"]) > 1:
23-
prediction += " Using " + result["llm"]
24-
25-
print(prediction)
26-
return render_template("index.html", prediction=prediction if prediction is not None else "")
27-
28-
29-
if __name__ == "__main__":
30-
app.run(debug=True)
11+
# Initialize FastAPI app
12+
app = FastAPI()
13+
14+
# Set up Jinja2 templates
15+
templates = Jinja2Templates(directory="templates")
16+
17+
@app.get("/", response_class=HTMLResponse)
18+
async def root(request: Request):
19+
return templates.TemplateResponse("index.html", {"request": request, "prediction": None, "input_text": ""})
20+
21+
@app.post("/classify", response_class=HTMLResponse)
22+
async def classify(request: Request, input_text: str = Form(...)):
23+
if input_text.strip(): # Ensure input is not empty
24+
result = classify_text(input_text, model_ai_hum, model_llm, tokenizer)
25+
prediction = result["type"]
26+
27+
# Ensure "llm" exists and is not empty
28+
if result.get("llm"):
29+
prediction += f" Using {result['llm']}"
30+
31+
return templates.TemplateResponse("index.html", {"request": request, "prediction": prediction, "input_text": input_text})
3132

33+
return templates.TemplateResponse("index.html", {"request": request, "prediction": "No text provided.", "input_text": ""})

app/templates/index.html

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -83,9 +83,10 @@
8383
<body>
8484
<div class="container">
8585
<h2><i class="fas fa-brain"></i> AI vs Human Text Detector</h2>
86-
<form method="POST">
87-
<textarea name="text" rows="5" placeholder="Type here..."
88-
class="{% if prediction == 'Human' %}human-text{% elif prediction %}ai-text{% endif %}">{{ request.form.get('text', '') }}</textarea>
86+
<form method="POST" action="/classify">
87+
<textarea name="input_text" rows="5" placeholder="Type here..."
88+
class="{% if prediction == 'Human' %}human-text{% elif prediction %}ai-text{% endif %}">{{ input_text }}</textarea>
89+
8990
<button type="submit"><i class="fas fa-search"></i> Analyze</button>
9091
</form>
9192

requirements.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,5 @@ torch
55
transformers
66
numpy
77
flask
8-
gdown
8+
gdown
9+
jinja2

0 commit comments

Comments (0)