Skip to content

Commit 36ced2d

Browse files
committed
remove debug prints
1 parent 0353cac commit 36ced2d

File tree

1 file changed

+0
-4
lines changed

1 file changed

+0
-4
lines changed

tabmemcheck/llm.py

Lines changed: 0 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -301,9 +301,6 @@ def __init__(self, model: str):
301301
self.chat_mode = True
302302

303303
def chat_completion(self, messages, temperature, max_tokens):
304-
print("Received messages:")
305-
for msg in messages:
306-
print(f"Role: {msg['role']}, Content: {repr(msg['content'])}")
307304
# Extract system prompt if present
308305
system_prompt = None
309306
for message in messages:
@@ -342,7 +339,6 @@ def chat_completion(self, messages, temperature, max_tokens):
342339
if system_prompt:
343340
message_args["system"] = system_prompt
344341

345-
print(f"{message_args=}")
346342
response = self.anthropic.messages.create(**message_args)
347343
return response.content[0].text
348344
except Exception as e:

0 commit comments

Comments (0)