Commit b3f6324 (parent 748d253)

fix docker build

7 files changed: +15 −17 lines

.dockerignore

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+data
+tmp

Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ WORKDIR /app
 
 # Copy requirements and install Python dependencies
 COPY requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt
+RUN pip install -r requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple
 
 # Install Playwright and browsers with system dependencies
 ENV PLAYWRIGHT_BROWSERS_PATH=/ms-playwright

docker-compose.yml

Lines changed: 0 additions & 4 deletions
@@ -28,13 +28,9 @@ services:
       - RESOLUTION_WIDTH=${RESOLUTION_WIDTH:-1920}
       - RESOLUTION_HEIGHT=${RESOLUTION_HEIGHT:-1080}
       - VNC_PASSWORD=${VNC_PASSWORD:-vncpassword}
-      - PERSISTENT_BROWSER_PORT=9222
-      - PERSISTENT_BROWSER_HOST=localhost
       - CHROME_DEBUGGING_PORT=9222
       - CHROME_DEBUGGING_HOST=localhost
     volumes:
-      - ./data:/app/data
-      - ./data/chrome_data:/app/data/chrome_data
       - /tmp/.X11-unix:/tmp/.X11-unix
     restart: unless-stopped
     shm_size: '2gb'

src/utils/utils.py

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ def get_llm_model(provider: str, **kwargs):
         return ChatOllama(
             model=kwargs.get("model_name", "qwen2.5:7b"),
             temperature=kwargs.get("temperature", 0.0),
-            num_ctx=128000,
+            num_ctx=kwargs.get("num_ctx", 32000),
             base_url=kwargs.get("base_url", "http://localhost:11434"),
         )
     elif provider == "azure_openai":
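
Because get_llm_model forwards **kwargs into ChatOllama, the Ollama context window is now caller-configurable instead of being pinned at 128000 tokens. A minimal usage sketch, assuming the defaults visible in this hunk (32000-token num_ctx, local base_url) and an import path matching the file's location in this commit:

from src.utils import utils  # import path assumed from src/utils/utils.py in this commit

# Override the context window explicitly; omitting num_ctx falls back to
# the new 32000-token default rather than the old hard-coded 128000.
llm = utils.get_llm_model(
    provider="ollama",
    model_name="qwen2.5:7b",
    temperature=0.0,
    num_ctx=8192,                          # illustrative value
    base_url="http://localhost:11434",
)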

supervisord.conf

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ depends_on=x11vnc
 
 [program:persistent_browser]
 command=bash -c 'mkdir -p /app/data/chrome_data && sleep 8 && google-chrome --user-data-dir=/app/data/chrome_data --window-position=0,0 --window-size=%(ENV_RESOLUTION_WIDTH)s,%(ENV_RESOLUTION_HEIGHT)s --start-maximized --no-sandbox --disable-dev-shm-usage --disable-gpu --disable-software-rasterizer --disable-setuid-sandbox --no-first-run --no-default-browser-check --no-experiments --ignore-certificate-errors --remote-debugging-port=9222 --remote-debugging-address=0.0.0.0 "data:text/html,<html><body style=\"background: \#f0f0f0; margin: 0; display: flex; justify-content: center; align-items: center; height: 100vh; font-family: Arial;\"><h1>Browser Ready for AI Interaction</h1></body></html>"'
-autorestart=%(ENV_CHROME_PERSISTENT_SESSION)s
+autorestart=true
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0
 stderr_logfile=/dev/stderr

tests/test_browser_use.py

Lines changed: 8 additions & 8 deletions
@@ -247,18 +247,18 @@ async def test_browser_use_custom_v2():
     # api_key=os.getenv("GOOGLE_API_KEY", "")
     # )
 
-    llm = utils.get_llm_model(
-        provider="deepseek",
-        model_name="deepseek-chat",
-        temperature=0.8
-    )
-
     # llm = utils.get_llm_model(
-    #     provider="ollama", model_name="qwen2.5:7b", temperature=0.8
+    #     provider="deepseek",
+    #     model_name="deepseek-chat",
+    #     temperature=0.8
     # )
 
+    llm = utils.get_llm_model(
+        provider="ollama", model_name="qwen2.5:7b", temperature=0.5
+    )
+
     controller = CustomController()
-    use_own_browser = True
+    use_own_browser = False
     disable_security = True
     use_vision = False  # Set to False when using DeepSeek
     tool_call_in_content = True  # Set to True when using Ollama

tests/test_llm_api.py

Lines changed: 2 additions & 2 deletions
@@ -127,5 +127,5 @@ def test_ollama_model():
     # test_openai_model()
     # test_gemini_model()
     # test_azure_openai_model()
-    test_deepseek_model()
-    # test_ollama_model()
+    # test_deepseek_model()
+    test_ollama_model()
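
The script's main block now runs the Ollama test instead of the DeepSeek one. The test body itself is not part of this diff; below is a minimal sketch of the kind of round-trip such a test might perform, assuming a local Ollama server with qwen2.5:7b pulled and the same get_llm_model helper (the function name and import path here are hypothetical, not taken from the repository):

from src.utils import utils  # import path assumed from src/utils/utils.py in this commit

def check_ollama_roundtrip():
    # Hypothetical smoke check: build the Ollama-backed chat model via the
    # shared factory and send one prompt to the local server.
    llm = utils.get_llm_model(
        provider="ollama",
        model_name="qwen2.5:7b",
        temperature=0.0,
    )
    response = llm.invoke("Reply with the single word: ok")
    print(response.content)

if __name__ == "__main__":
    check_ollama_roundtrip()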
