
Commit 2529fc9

0.4.2

1 parent a080d25
3 files changed: +66 additions, -35 deletions

interpreter/computer_use/loop.py

Lines changed: 48 additions & 17 deletions
@@ -7,10 +7,16 @@
 import os
 import platform
 import time
+import traceback
 import uuid
 from collections.abc import Callable
 from datetime import datetime
-from enum import StrEnum
+
+try:
+    from enum import StrEnum
+except ImportError:  # 3.10 compatibility
+    from enum import Enum as StrEnum
+
 from typing import Any, List, cast
 
 import requests
@@ -185,6 +191,8 @@ async def sampling_loop(
             elif isinstance(chunk, BetaRawContentBlockDeltaEvent):
                 if chunk.delta.type == "text_delta":
                     print(f"{chunk.delta.text}", end="", flush=True)
+                    yield {"type": "chunk", "chunk": chunk.delta.text}
+                    await asyncio.sleep(0)
                     if current_block and current_block.type == "text":
                         current_block.text += chunk.delta.text
                 elif chunk.delta.type == "input_json_delta":
@@ -199,10 +207,13 @@ async def sampling_loop(
                     # Finished a tool call
                     # print()
                     current_block.input = json.loads(current_block.partial_json)
+                    # yield {"type": "chunk", "chunk": current_block.input}
                     delattr(current_block, "partial_json")
                 else:
                     # Finished a message
                     print("\n")
+                    yield {"type": "chunk", "chunk": "\n"}
+                    await asyncio.sleep(0)
                 response_content.append(current_block)
                 current_block = None

@@ -241,7 +252,9 @@ async def sampling_loop(
                 tool_output_callback(result, content_block.id)
 
         if not tool_result_content:
-            return messages
+            # Done!
+            yield {"type": "messages", "messages": messages}
+            break
 
         messages.append({"content": tool_result_content, "role": "user"})

@@ -362,6 +375,7 @@ async def main():
 
     @app.post("/openai/chat/completions")
     async def chat_completion(request: ChatCompletionRequest):
+        print("BRAND NEW REQUEST")
         # Check exit flag before processing request
         if exit_flag:
             return {"error": "Server shutting down due to mouse in corner"}
@@ -395,23 +409,36 @@ async def tool_output_callback(result: ToolResult, tool_id: str):
                 yield chunk
 
             try:
-                result = await sampling_loop(
+                yield f"data: {json.dumps({'choices': [{'delta': {'role': 'assistant'}}]})}\n\n"
+
+                messages = [m for m in messages if m["content"]]
+                print(str(messages)[-100:])
+                await asyncio.sleep(4)
+
+                async for chunk in sampling_loop(
                     model=model,
                     provider=provider,
                     system_prompt_suffix=system_prompt_suffix,
                     messages=messages,  # Now using global messages
                     output_callback=output_callback,
                     tool_output_callback=tool_output_callback,
                     api_key=api_key,
-                )
+                ):
+                    if chunk["type"] == "chunk":
+                        await asyncio.sleep(0)
+                        yield f"data: {json.dumps({'choices': [{'delta': {'content': chunk['chunk']}}]})}\n\n"
+                    if chunk["type"] == "messages":
+                        messages = chunk["messages"]
 
-                # # Yield all stored chunks
-                # for chunk in response_chunks:
-                #     yield chunk
+                yield f"data: {json.dumps({'choices': [{'delta': {'content': '', 'finish_reason': 'stop'}}]})}\n\n"
 
             except Exception as e:
-                print(f"Error: {e}")
-                yield f"data: {json.dumps({'error': str(e)})}\n\n"
+                print("Error: An exception occurred.")
+                print(traceback.format_exc())
+                pass
+                # raise
+                # print(f"Error: {e}")
+                # yield f"data: {json.dumps({'error': str(e)})}\n\n"
 
         return StreamingResponse(stream_response(), media_type="text/event-stream")

@@ -436,20 +463,22 @@ async def tool_output_callback(result: ToolResult, tool_id: str):
     import random
 
     tips = [
-        "You can type `i` in your terminal to use Open Interpreter.",
-        "Type `wtf` in your terminal to have Open Interpreter fix the last error.",
-        "You can type prompts after `i` in your terminal, for example, `i want you to install node`. (Yes, really.)",
+        # "You can type `i` in your terminal to use Open Interpreter.",
+        "**Tip:** Type `wtf` in your terminal to have Open Interpreter fix the last error.",
+        # "You can type prompts after `i` in your terminal, for example, `i want you to install node`. (Yes, really.)",
+        "We recommend using our desktop app for the best experience. Type `d` for early access.",
+        "**Tip:** Reduce display resolution for better performance.",
     ]
 
     random_tip = random.choice(tips)
 
     markdown_text = f"""> Model set to `Claude 3.5 Sonnet (New)`, OS control enabled
 
-We recommend using our desktop app for the best experience. Type `d` for early access.
+{random_tip}
 
 **Warning:** This AI has full system access and can modify files, install software, and execute commands. By continuing, you accept all risks and responsibility.
 
-Move your mouse to any corner of the screen to exit. Reduce display resolution for better performance.
+Move your mouse to any corner of the screen to exit.
 """
 
     print_markdown(markdown_text)
@@ -496,17 +525,19 @@ def tool_output_callback(result: ToolResult, tool_id: str):
             print(f"---\n{result.error}\n---")
 
     try:
-        messages = await sampling_loop(
+        async for chunk in sampling_loop(
             model=model,
             provider=provider,
             system_prompt_suffix=system_prompt_suffix,
             messages=messages,
             output_callback=output_callback,
             tool_output_callback=tool_output_callback,
             api_key=api_key,
-        )
+        ):
+            if chunk["type"] == "messages":
+                messages = chunk["messages"]
     except Exception as e:
-        print(f"An error occurred: {e}")
+        raise
 
     # The thread will automatically terminate when the main program exits

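Taken together, these loop.py changes turn `sampling_loop` from a coroutine that returns the message list into an async generator: it yields `{"type": "chunk", ...}` text deltas while streaming and a final `{"type": "messages", ...}` payload, which `chat_completion` relays as OpenAI-style `data:` events over `text/event-stream`. As a rough sketch of how a client might consume that stream (the host, port, and request payload below are assumptions; only the endpoint path and the `choices[0].delta` framing appear in this diff):

# Hypothetical client for the /openai/chat/completions SSE endpoint above.
# BASE_URL and the request payload are assumptions; only the "data: {...}"
# framing with choices[0].delta comes from this commit.
import json
import requests

BASE_URL = "http://localhost:8000"  # assumed bind address for the uvicorn server

payload = {
    "model": "claude-3-5-sonnet",  # placeholder; the server pins its own model
    "messages": [{"role": "user", "content": "open a text editor"}],
    "stream": True,
}

with requests.post(f"{BASE_URL}/openai/chat/completions", json=payload, stream=True) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if not line or not line.startswith("data: "):
            continue  # skip blank SSE separators
        event = json.loads(line[len("data: "):])
        delta = event["choices"][0]["delta"]
        if delta.get("content"):
            print(delta["content"], end="", flush=True)  # streamed text chunk
        if delta.get("finish_reason") == "stop":
            break  # final event, sent after sampling_loop yields "messages"

The `await asyncio.sleep(0)` calls after each yield in `sampling_loop` and in `stream_response` hand control back to the event loop, which is what lets each chunk reach the open HTTP response as it is produced instead of only after the turn completes.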
poetry.lock

Lines changed: 16 additions & 16 deletions
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 2 additions & 2 deletions
@@ -33,8 +33,6 @@ torchvision = { version = "^0.18.0", optional = true }
 easyocr = { version = "^1.7.1", optional = true }
 
 # Optional [server] dependencies
-fastapi = { version = "^0.111.0", optional = true }
-uvicorn = { version = "^0.30.1", optional = true }
 janus = { version = "^1.0.0", optional = true }
 
 # Required dependencies
@@ -71,6 +69,8 @@ webdriver-manager = "^4.0.2"
 anthropic = "^0.37.1"
 pyautogui = "^0.9.54"
 typer = "^0.12.5"
+fastapi = "^0.111.0"
+uvicorn = "^0.30.1"
 
 [tool.poetry.extras]
 os = ["opencv-python", "pyautogui", "plyer", "pywinctl", "pytesseract", "sentence-transformers", "ipywidgets", "timm", "screeninfo"]
