-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
64 lines (52 loc) · 1.82 KB
/
main.py
File metadata and controls
64 lines (52 loc) · 1.82 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
import os, json
from fastapi import FastAPI
import psycopg2
import redis
# Postgres connection settings; defaults match the in-cluster service names.
PG_HOST = os.getenv("PG_HOST", "pg-rw")
PG_DB = os.getenv("PG_DB", "appdb")
PG_USER = os.getenv("PG_USER", "appuser")
PG_PASS = os.getenv("PG_PASS", "apppass")
# Dragonfly is Redis-protocol compatible, hence the redis client below.
REDIS_HOST = os.getenv("REDIS_HOST", "dragonfly")
REDIS_PORT = int(os.getenv("REDIS_PORT", "6379"))
# Single cache key holding the JSON-serialized full todo list.
CACHE_KEY = "todos:all"
app = FastAPI()
# decode_responses=True makes r.get() return str instead of bytes,
# so the cached value can be fed straight to json.loads.
r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
def pg_conn():
    """Open and return a fresh PostgreSQL connection from env-configured settings."""
    return psycopg2.connect(
        host=PG_HOST,
        dbname=PG_DB,
        user=PG_USER,
        password=PG_PASS,
    )
# NOTE(review): @app.on_event is deprecated in recent FastAPI in favor of
# lifespan handlers — worth migrating, but kept as-is here.
@app.on_event("startup")
def init_db():
    """Ensure the todos table exists before the app starts serving requests."""
    ddl = """
        CREATE TABLE IF NOT EXISTS todos (
            id SERIAL PRIMARY KEY,
            title TEXT NOT NULL
        );
    """
    # psycopg2's connection context manager wraps a transaction;
    # the explicit commit keeps the DDL durable regardless.
    with pg_conn() as conn, conn.cursor() as cur:
        cur.execute(ddl)
        conn.commit()
@app.get("/todos")
def list_todos():
    """List all todos, newest first, serving from the Dragonfly cache when warm.

    Cache-aside: on a miss, read from Postgres, repopulate the cache with a
    short TTL, and report which backend answered via the "source" field.
    """
    cached = r.get(CACHE_KEY)
    if cached:
        return {"source": "cache(dragonfly)", "items": json.loads(cached)}

    with pg_conn() as conn, conn.cursor() as cur:
        cur.execute("SELECT id, title FROM todos ORDER BY id DESC;")
        items = [
            {"id": todo_id, "title": todo_title}
            for todo_id, todo_title in cur.fetchall()
        ]

    # Short TTL keeps the cache bounded-stale even if an invalidation is missed.
    r.set(CACHE_KEY, json.dumps(items), ex=30)
    return {"source": "db(postgres)", "items": items}
@app.post("/todos")
def add_todo(payload: dict):
    """Insert a new todo and invalidate the cached list.

    Expects a JSON body like {"title": "buy milk"}. A missing, null,
    non-string, or whitespace-only title yields {"ok": False, ...}.

    Bug fix: the original called .strip() on payload.get("title", ""),
    which raises AttributeError (HTTP 500) when the client sends
    {"title": null} or a non-string title; guard with isinstance instead.
    """
    raw_title = payload.get("title")
    title = raw_title.strip() if isinstance(raw_title, str) else ""
    if not title:
        return {"ok": False, "error": "title is required"}

    with pg_conn() as conn, conn.cursor() as cur:
        # Parameterized query; RETURNING avoids a second round-trip for the id.
        cur.execute("INSERT INTO todos (title) VALUES (%s) RETURNING id;", (title,))
        new_id = cur.fetchone()[0]
        conn.commit()

    r.delete(CACHE_KEY)  # invalidate so the next GET sees the new row
    return {"ok": True, "id": new_id, "title": title}