Skip to content

Commit ca4ac0a

Browse files
committed
chore: adjust number of workers
1 parent f8394a5 commit ca4ac0a

File tree

3 files changed

+16
-8
lines changed

3 files changed

+16
-8
lines changed

webgenie/constants.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import bittensor as bt
22
import os
3-
3+
import psutil
44
# Change this value when updating your code base.
55
# Define the version of the webgenie.
66
__VERSION__ = "1.1.18" # version
@@ -88,3 +88,11 @@
8888
NEURON_EPOCH_LENGTH = int(os.getenv("NEURON_EPOCH_LENGTH", 25)) # neuron epoch length
8989

9090
MAX_NUMBER_OF_TASKS_PER_SESSION = 18 # max number of tasks per session
91+
92+
NUMBER_OF_CONCURRENT_WORKERS = max(
93+
1,
94+
min(
95+
os.cpu_count(),
96+
(psutil.virtual_memory().total) // (1024 * 1024 * 1024 * 4)
97+
)
98+
)

webgenie/rewards/lighthouse_reward/lighthouse_reward.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,9 +7,9 @@
77
import numpy as np
88
from typing import List
99

10+
from webgenie.constants import NUMBER_OF_CONCURRENT_WORKERS
1011
from webgenie.rewards.reward import Reward
1112
from webgenie.tasks import Task, Solution
12-
1313
from .get_lighthouse_score import get_lighthouse_score
1414

1515

@@ -40,9 +40,9 @@ async def reward(self, task: Task, solutions: List[Solution]) -> np.ndarray:
4040
htmls = [solution.html for solution in solutions]
4141

4242
# Use ProcessPoolExecutor for parallel processing
43-
with multiprocessing.Pool(processes=os.cpu_count()) as pool:
43+
with multiprocessing.Pool(processes=NUMBER_OF_CONCURRENT_WORKERS) as pool:
4444
# Convert solutions into chunks for parallel processing
45-
chunk_size = max(1, len(htmls) // os.cpu_count())
45+
chunk_size = max(1, len(htmls) // NUMBER_OF_CONCURRENT_WORKERS)
4646

4747
html_chunks = [htmls[i:i + chunk_size] for i in range(0, len(htmls), chunk_size)]
4848

webgenie/rewards/visual_reward/visual_reward.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
from datetime import datetime
1212
from typing import List
1313

14-
from webgenie.constants import WORK_DIR
14+
from webgenie.constants import WORK_DIR, NUMBER_OF_CONCURRENT_WORKERS
1515
from webgenie.rewards.reward import Reward
1616
from webgenie.rewards.visual_reward.common.browser import start_browser, stop_browser
1717
from webgenie.rewards.visual_reward.high_level_matching_score import high_level_matching_score
@@ -80,11 +80,11 @@ async def reward(self, task: Task, solutions: List[Solution]) -> np.ndarray:
8080
current_work_dir = f"{WORK_DIR}/task_{timestamp}_{task.task_id}"
8181
os.makedirs(current_work_dir, exist_ok=True)
8282

83-
bt.logging.info(f"The number of cpu cores: {os.cpu_count()}")
83+
bt.logging.info(f"The number of concurrent workers: {NUMBER_OF_CONCURRENT_WORKERS}")
8484
# Use ProcessPoolExecutor for parallel processing
85-
with multiprocessing.Pool(processes=os.cpu_count()) as pool:
85+
with multiprocessing.Pool(processes=NUMBER_OF_CONCURRENT_WORKERS) as pool:
8686
# Convert solutions into chunks for parallel processing
87-
chunk_size = max(1, len(solutions) // os.cpu_count())
87+
chunk_size = max(1, len(solutions) // NUMBER_OF_CONCURRENT_WORKERS)
8888
solution_chunks = [solutions[i:i + chunk_size] for i in range(0, len(solutions), chunk_size)]
8989

9090
# Create partial tasks for each chunk

0 commit comments

Comments (0)