Feat/docker PDF strategy #1518
Changes from 6 commits
deploy/docker/crawler_pool.py
```diff
@@ -1,11 +1,11 @@
 # crawler_pool.py (new file)
 import asyncio, json, hashlib, time, psutil
 from contextlib import suppress
-from typing import Dict
+from typing import Dict, Optional
 from crawl4ai import AsyncWebCrawler, BrowserConfig
 from typing import Dict
 from utils import load_config
```
Contributor

🛠️ Refactor suggestion | 🟠 Major | 🧩 Analysis chain

Verify import path reliability for utils. The absolute import `from utils import load_config` only resolves when `deploy/docker` itself is on `sys.path` (e.g. when the server is launched from that directory); it fails if `crawler_pool` is imported as part of a package from elsewhere.

🏁 Script executed:

```bash
#!/bin/bash
# Check if deploy/docker is a package and whether utils is importable.
set -euo pipefail
if [ -f "deploy/docker/__init__.py" ]; then
  echo "deploy/docker is a package."
else
  echo "deploy/docker is NOT a package; relative imports won't work."
fi
echo "Listing files next to crawler_pool.py:"
ls -la deploy/docker || true
echo "Searching for demo entrypoint:"
fd -a --exact-path demo_docker_api.py || true
```

Length of output: 1558

Convert the `utils` import to package-relative.
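A minimal sketch of the suggested change, assuming `deploy/docker` is a package (so the module can be imported as `deploy.docker.crawler_pool`) while still supporting direct execution from that directory; the fallback branch is illustrative and not part of this PR:

```python
# Sketch only: resolve utils both package-relatively and as a flat import.
try:
    from .utils import load_config   # imported as deploy.docker.crawler_pool
except ImportError:
    from utils import load_config    # run directly with deploy/docker on sys.path
```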
```diff
 
 CONFIG = load_config()
 
 POOL: Dict[str, AsyncWebCrawler] = {}
```
```diff
@@ -15,20 +15,31 @@
 MEM_LIMIT = CONFIG.get("crawler", {}).get("memory_threshold_percent", 95.0)  # % RAM – refuse new browsers above this
 IDLE_TTL = CONFIG.get("crawler", {}).get("pool", {}).get("idle_ttl_sec", 1800)  # close if unused for 30 min
 
-def _sig(cfg: BrowserConfig) -> str:
-    payload = json.dumps(cfg.to_dict(), sort_keys=True, separators=(",",":"))
-    return hashlib.sha1(payload.encode()).hexdigest()
+def _sig(cfg: BrowserConfig, crawler_strategy: Optional[object] = None) -> str:
+    """
+    Generate a unique signature for a crawler based on browser config
+    and optional crawler strategy. This ensures that crawlers with
+    different strategies (e.g., PDF) are stored separately in the pool.
+    """
+    payload = cfg.to_dict()
+
+    if crawler_strategy is not None:
+        payload["strategy"] = crawler_strategy.__class__.__name__
```
Comment on lines +26 to +28

Contributor

🛠️ Refactor suggestion

Include strategy configuration in the pool signature. Keying pooled crawlers only by the strategy's class name means two requests that use the same strategy class with different settings hash to the same signature and end up sharing one pooled crawler; the strategy's configuration should be folded into the signature as well.
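A hedged sketch of how this suggestion could be applied inside `_sig`; the use of `__dict__` and `default=repr` are assumptions about how strategy objects expose their settings, not code from this PR:

```python
# Sketch: fold the strategy's configuration, not just its class name,
# into the pool signature so differently-configured strategies of the
# same class do not collide. Attribute access here is an assumption.
if crawler_strategy is not None:
    payload["strategy"] = crawler_strategy.__class__.__name__
    strategy_state = getattr(crawler_strategy, "__dict__", {})
    # default=repr keeps non-JSON-serializable values (callables, loggers)
    # from breaking serialization while staying stable within a process.
    payload["strategy_config"] = json.dumps(strategy_state, default=repr, sort_keys=True)
```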
```diff
+
+    json_payload = json.dumps(payload, sort_keys=True, separators=(",", ":"))
+    return hashlib.sha1(json_payload.encode()).hexdigest()
 
 
-async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler:
+async def get_crawler(cfg: BrowserConfig, crawler_strategy: Optional[object] = None) -> AsyncWebCrawler:
     try:
-        sig = _sig(cfg)
+        sig = _sig(cfg, crawler_strategy=crawler_strategy)
         async with LOCK:
             if sig in POOL:
                 LAST_USED[sig] = time.time();
                 return POOL[sig]
             if psutil.virtual_memory().percent >= MEM_LIMIT:
                 raise MemoryError("RAM pressure – new browser denied")
-            crawler = AsyncWebCrawler(config=cfg, thread_safe=False)
+            crawler = AsyncWebCrawler(config=cfg, thread_safe=False, crawler_strategy=crawler_strategy)
             await crawler.start()
             POOL[sig] = crawler; LAST_USED[sig] = time.time()
             return crawler
```
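For orientation, a usage sketch of the new `crawler_strategy` parameter as a handler in the Docker server might call it; the `PDFCrawlerStrategy` import path and the standalone `asyncio.run` wrapper are assumptions for illustration, not code from this PR:

```python
import asyncio
from crawl4ai import BrowserConfig
from crawl4ai.processors.pdf import PDFCrawlerStrategy  # assumed import path
from crawler_pool import get_crawler, close_all

async def main():
    cfg = BrowserConfig(headless=True)
    # Same BrowserConfig, different strategies: _sig() now includes the
    # strategy class name, so these resolve to two distinct pooled crawlers.
    default_crawler = await get_crawler(cfg)
    pdf_crawler = await get_crawler(cfg, crawler_strategy=PDFCrawlerStrategy())
    assert default_crawler is not pdf_crawler
    await close_all()

asyncio.run(main())
```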
@@ -44,6 +55,7 @@ async def get_crawler(cfg: BrowserConfig) -> AsyncWebCrawler: | |
| POOL.pop(sig, None) | ||
| LAST_USED.pop(sig, None) | ||
| # If we failed to start the browser, we should remove it from the pool | ||
|
|
||
| async def close_all(): | ||
| async with LOCK: | ||
| await asyncio.gather(*(c.close() for c in POOL.values()), return_exceptions=True) | ||
|
|
||
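Not visible in these hunks is how `IDLE_TTL` and `LAST_USED` are consumed. A minimal sketch of an idle-eviction loop consistent with those names, assuming it lives inside `crawler_pool.py`; the function name and check interval are illustrative, and the module's actual reaper may differ:

```python
async def _evict_idle_sketch():
    """Illustrative only: close and drop crawlers unused for IDLE_TTL seconds."""
    while True:
        await asyncio.sleep(60)  # assumed check interval
        async with LOCK:
            now = time.time()
            for sig, crawler in list(POOL.items()):
                if now - LAST_USED.get(sig, now) > IDLE_TTL:
                    with suppress(Exception):
                        await crawler.close()
                    POOL.pop(sig, None)
                    LAST_USED.pop(sig, None)
```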