Skip to content

Commit c05103e

Browse files
authored
refactor(backend): migrate print() to structured logging (#5067)
1 parent e109a7b commit c05103e

File tree

126 files changed

+1822
-1592
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

126 files changed

+1822
-1592
lines changed

backend/database/action_items.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
from google.cloud.firestore_v1 import FieldFilter
55

66
from ._client import db
7+
import logging
8+
9+
logger = logging.getLogger(__name__)
710

811

912
# Collection name
@@ -465,4 +468,4 @@ def unlock_all_action_items(uid: str):
465468
count = 0
466469
if count > 0:
467470
batch.commit()
468-
print(f"Unlocked all action items for user {uid}")
471+
logger.info(f"Unlocked all action items for user {uid}")

backend/database/apps.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,9 @@
1010
from models.app import UsageHistoryType
1111
from ._client import db
1212
from .redis_db import get_app_reviews
13+
import logging
14+
15+
logger = logging.getLogger(__name__)
1316

1417
# *****************************
1518
# ********** CRUD *************
@@ -23,7 +26,7 @@
2326
def migrate_reviews_from_redis_to_firestore():
2427
apps_ref = db.collection(apps_collection).stream()
2528
for app in apps_ref:
26-
print('migrating reviews for app:', app.id)
29+
logger.info(f'migrating reviews for app: {app.id}')
2730
app_id = app.id
2831
reviews = get_app_reviews(app_id)
2932
for uid, review in reviews.items():

backend/database/auth.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,16 @@
11
from firebase_admin import auth
22

33
from database.redis_db import cache_user_name, get_cached_user_name
4+
import logging
5+
6+
logger = logging.getLogger(__name__)
47

58

69
def get_user_from_uid(uid: str):
710
try:
811
user = auth.get_user(uid) if uid else None
912
except Exception as e:
10-
print(e)
13+
logger.error(e)
1114
user = None
1215
if not user:
1316
return None

backend/database/cache.py

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -29,13 +29,9 @@ def _ensure_initialized():
2929

3030
# Register callbacks: when invalidation message received, clear memory cache
3131
_pubsub_manager.register_callback(
32-
'get_public_approved_apps_data*',
33-
lambda keys: [_memory_cache.delete(k) for k in keys]
34-
)
35-
_pubsub_manager.register_callback(
36-
'get_popular_apps_data',
37-
lambda keys: [_memory_cache.delete(k) for k in keys]
32+
'get_public_approved_apps_data*', lambda keys: [_memory_cache.delete(k) for k in keys]
3833
)
34+
_pubsub_manager.register_callback('get_popular_apps_data', lambda keys: [_memory_cache.delete(k) for k in keys])
3935

4036
# Start pub/sub subscription
4137
_pubsub_manager.start()

backend/database/cache_manager.py

Lines changed: 4 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
@dataclass
2121
class CacheEntry:
2222
"""Represents a cache entry with metadata."""
23+
2324
data: Any
2425
timestamp: float
2526
size_bytes: int
@@ -149,12 +150,7 @@ def set(self, key: str, data: Any, ttl: int = 30):
149150
self._evict_if_needed(size_bytes)
150151

151152
# Add new entry
152-
entry = CacheEntry(
153-
data=data,
154-
timestamp=time.time(),
155-
size_bytes=size_bytes,
156-
ttl=ttl
157-
)
153+
entry = CacheEntry(data=data, timestamp=time.time(), size_bytes=size_bytes, ttl=ttl)
158154
self.cache[key] = entry
159155
self.current_size += size_bytes
160156

@@ -195,8 +191,7 @@ def _evict_if_needed(self, required_bytes: int):
195191
Args:
196192
required_bytes: Bytes needed for new entry
197193
"""
198-
while (self.current_size + required_bytes > self.max_memory_bytes
199-
and len(self.cache) > 0):
194+
while self.current_size + required_bytes > self.max_memory_bytes and len(self.cache) > 0:
200195
# Remove oldest (first item in OrderedDict)
201196
key, entry = self.cache.popitem(last=False)
202197
self.current_size -= entry.size_bytes
@@ -237,5 +232,5 @@ def get_stats(self) -> dict:
237232
'hits': self.hits,
238233
'misses': self.misses,
239234
'hit_rate': round(hit_rate, 2),
240-
'evictions': self.evictions
235+
'evictions': self.evictions,
241236
}

backend/database/chat.py

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,9 @@
1212
from utils.other.endpoints import timeit
1313
from ._client import db
1414
from .helpers import set_data_protection_level, prepare_for_write, prepare_for_read
15+
import logging
16+
17+
logger = logging.getLogger(__name__)
1518

1619
# *********************************
1720
# ******* ENCRYPTION HELPERS ******
@@ -176,7 +179,7 @@ def get_messages(
176179
chat_session_id: Optional[str] = None,
177180
# include_plugin_id_filter: bool = True,
178181
):
179-
print('get_messages', uid, limit, offset, app_id, include_conversations)
182+
logger.info(f'get_messages {uid} {limit} {offset} {app_id} {include_conversations}')
180183
user_ref = db.collection('users').document(uid)
181184
messages_ref = user_ref.collection('messages')
182185
# if include_plugin_id_filter:
@@ -261,7 +264,7 @@ def report_message(uid: str, msg_doc_id: str):
261264
message_ref.update({'reported': True})
262265
return {"message": "Message reported"}
263266
except Exception as e:
264-
print("Update failed:", e)
267+
logger.error(f"Update failed: {e}")
265268
return {"message": f"Update failed: {e}"}
266269

267270

@@ -278,15 +281,15 @@ def update_message_rating(uid: str, message_id: str, rating: int | None):
278281
message_ref = user_ref.collection('messages').where('id', '==', message_id).limit(1).stream()
279282
message_doc = next(message_ref, None)
280283
if not message_doc:
281-
print(f"⚠️ Message {message_id} not found for user {uid}")
284+
logger.warning(f"⚠️ Message {message_id} not found for user {uid}")
282285
return False
283286

284287
try:
285288
user_ref.collection('messages').document(message_doc.id).update({'rating': rating})
286-
print(f"✅ Updated message {message_id} rating to {rating}")
289+
logger.info(f"✅ Updated message {message_id} rating to {rating}")
287290
return True
288291
except Exception as e:
289-
print(f"❌ Failed to update message rating: {e}")
292+
logger.error(f"❌ Failed to update message rating: {e}")
290293
return False
291294

292295

@@ -297,32 +300,32 @@ def batch_delete_messages(
297300
messages_ref = messages_ref.where(filter=FieldFilter('plugin_id', '==', app_id))
298301
if chat_session_id:
299302
messages_ref = messages_ref.where(filter=FieldFilter('chat_session_id', '==', chat_session_id))
300-
print('batch_delete_messages', app_id)
303+
logger.info(f'batch_delete_messages {app_id}')
301304

302305
while True:
303306
docs_stream = messages_ref.limit(batch_size).stream()
304307
docs_list = list(docs_stream)
305308

306309
if not docs_list:
307-
print("No more messages to delete")
310+
logger.info("No more messages to delete")
308311
break
309312

310313
batch = db.batch()
311314
for doc in docs_list:
312315
batch.delete(doc.reference)
313316
batch.commit()
314317

315-
print(f'Deleted {len(docs_list)} messages')
318+
logger.info(f'Deleted {len(docs_list)} messages')
316319

317320
if len(docs_list) < batch_size:
318-
print("Processed all messages")
321+
logger.info("Processed all messages")
319322
break
320323

321324

322325
def clear_chat(uid: str, app_id: Optional[str] = None, chat_session_id: Optional[str] = None):
323326
try:
324327
user_ref = db.collection('users').document(uid)
325-
print(f"Deleting messages for user: {uid}")
328+
logger.info(f"Deleting messages for user: {uid}")
326329
if not user_ref.get().exists:
327330
return {"message": "User not found"}
328331
batch_delete_messages(user_ref, app_id=app_id, chat_session_id=chat_session_id)
@@ -474,7 +477,7 @@ def update_chat_session_openai_ids(uid: str, chat_session_id: str, thread_id: st
474477

475478
if update_data:
476479
session_ref.update(update_data)
477-
print(f"Updated session {chat_session_id} with thread {thread_id} and assistant {assistant_id}")
480+
logger.info(f"Updated session {chat_session_id} with thread {thread_id} and assistant {assistant_id}")
478481

479482

480483
# **************************************
@@ -512,7 +515,7 @@ def migrate_chats_level_batch(uid: str, message_doc_ids: List[str], target_level
512515

513516
for doc_snapshot in doc_snapshots:
514517
if not doc_snapshot.exists:
515-
print(f"Message {doc_snapshot.id} not found, skipping.")
518+
logger.warning(f"Message {doc_snapshot.id} not found, skipping.")
516519
continue
517520

518521
message_data = doc_snapshot.to_dict()

backend/database/conversations.py

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,9 @@
2222
from ._client import db
2323
from .helpers import set_data_protection_level, prepare_for_write, prepare_for_read, with_photos
2424
from utils.other.storage import list_audio_chunks
25+
import logging
26+
27+
logger = logging.getLogger(__name__)
2528

2629
conversations_collection = 'conversations'
2730

@@ -58,7 +61,7 @@ def _decrypt_conversation_data(conversation_data: Dict[str, Any], uid: str) -> D
5861
else:
5962
data['transcript_segments'] = json.loads(decrypted_payload)
6063
except (json.JSONDecodeError, TypeError, zlib.error, ValueError) as e:
61-
print(e, uid)
64+
logger.error(f"{e} {uid}")
6265
data['transcript_segments'] = []
6366
# backward compatibility, will be removed soon
6467
elif isinstance(data['transcript_segments'], bytes):
@@ -68,7 +71,7 @@ def _decrypt_conversation_data(conversation_data: Dict[str, Any], uid: str) -> D
6871
decompressed_json = zlib.decompress(compressed_bytes).decode('utf-8')
6972
data['transcript_segments'] = json.loads(decompressed_json)
7073
except (json.JSONDecodeError, TypeError, zlib.error, ValueError) as e:
71-
print(e, uid)
74+
logger.error(f"{e} {uid}")
7275
data['transcript_segments'] = []
7376

7477
return data
@@ -106,7 +109,7 @@ def _prepare_conversation_for_read(conversation_data: Optional[Dict[str, Any]],
106109
decompressed_json = zlib.decompress(data['transcript_segments']).decode('utf-8')
107110
data['transcript_segments'] = json.loads(decompressed_json)
108111
except (json.JSONDecodeError, TypeError, zlib.error) as e:
109-
print(e)
112+
logger.error(e)
110113
pass
111114

112115
return data
@@ -600,7 +603,7 @@ def migrate_conversations_level_batch(uid: str, conversation_ids: List[str], tar
600603

601604
for doc_snapshot in doc_snapshots:
602605
if not doc_snapshot.exists:
603-
print(f"Conversation {doc_snapshot.id} not found, skipping.")
606+
logger.warning(f"Conversation {doc_snapshot.id} not found, skipping.")
604607
continue
605608

606609
conversation_data = doc_snapshot.to_dict()
@@ -908,7 +911,7 @@ def unlock_all_conversations(uid: str):
908911
count = 0
909912
if count > 0:
910913
batch.commit()
911-
print(f"Unlocked all conversations for user {uid}")
914+
logger.info(f"Unlocked all conversations for user {uid}")
912915

913916

914917
def get_public_conversations(data: List[Tuple[str, str]]):
@@ -1050,13 +1053,13 @@ def get_closest_conversation_to_timestamps(uid: str, start_timestamp: int, end_t
10501053
)
10511054

10521055
conversations = [doc.to_dict() for doc in query.stream()]
1053-
print('get_closest_conversation_to_timestamps len(conversations)', len(conversations))
1056+
logger.info(f'get_closest_conversation_to_timestamps len(conversations) {len(conversations)}')
10541057
if not conversations:
10551058
return None
10561059

1057-
print('get_closest_conversation_to_timestamps found:')
1060+
logger.info('get_closest_conversation_to_timestamps found:')
10581061
for conversation in conversations:
1059-
print('-', conversation['id'], conversation['started_at'], conversation['finished_at'])
1062+
logger.info(f"- {conversation['id']} {conversation['started_at']} {conversation['finished_at']}")
10601063

10611064
# get the conversation that has the closest start timestamp or end timestamp
10621065
closest_conversation = None
@@ -1070,7 +1073,7 @@ def get_closest_conversation_to_timestamps(uid: str, start_timestamp: int, end_t
10701073
min_diff = min(diff1, diff2)
10711074
closest_conversation = conversation
10721075

1073-
print('get_closest_conversation_to_timestamps closest_conversation:', closest_conversation['id'])
1076+
logger.info(f"get_closest_conversation_to_timestamps closest_conversation: {closest_conversation['id']}")
10741077
return closest_conversation
10751078

10761079

backend/database/folders.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
from ._client import db
99
from models.folder import Folder
1010

11-
1211
# System folders that are created for new users
1312
SYSTEM_FOLDERS = [
1413
{

backend/database/helpers.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,9 @@
66

77
from database import users as users_db, redis_db
88
from ._client import db
9+
import logging
10+
11+
logger = logging.getLogger(__name__)
912

1013

1114
def set_data_protection_level(data_arg_name: str):
@@ -66,7 +69,7 @@ def wrapper(*args, **kwargs):
6669
level = user_profile.get('data_protection_level', 'enhanced') if user_profile else 'enhanced'
6770
redis_db.set_user_data_protection_level(uid, level)
6871
except Exception as e:
69-
print(f"Failed to get user profile for {uid}: {e}")
72+
logger.error(f"Failed to get user profile for {uid}: {e}")
7073
level = 'enhanced'
7174

7275
if not level:

0 commit comments

Comments
 (0)