Skip to content

Commit 2842889

Browse files
committed
#338: Optimize for the one-core setting.
Also sets Celery worker scaling back to one worker per core, as previous test results showed diminishing returns from running multiple workers per core (at least in a one-core setting).
1 parent 819e65e commit 2842889

File tree

3 files changed

+16
-9
lines changed

3 files changed

+16
-9
lines changed

backend/dps_training_k/configuration/settings.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -184,6 +184,7 @@
184184
"schedule": 1.0,
185185
},
186186
}
187+
CELERY_WORKER_CONCURRENCY = env.int("CELERY_WORKER_CONCURRENCY", default=1)
187188

188189
DATA_ROOT = os.path.join(BASE_DIR, "data")
189190

backend/dps_training_k/deployment/django/celery/worker/start

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,16 @@
33
set -o errexit
44
set -o nounset
55

6-
# Allow override via env var, otherwise 4x CPU cores for I/O-bound tasks,
7-
# capped at 32 to stay within PostgreSQL's default max_connections (100) with headroom for Django and CeleryBeat.
6+
# Use override if set, otherwise default to nproc (1 worker per core), capped at 32
7+
# to stay within PostgreSQL's default max_connections (100) with headroom for Django and CeleryBeat.
88
if [ -z "${CELERY_WORKER_CONCURRENCY:-}" ]; then
9-
CPU_COUNT=$(nproc)
10-
CONCURRENCY=$(( CPU_COUNT * 4 ))
9+
CONCURRENCY=$(nproc)
1110
if [ "$CONCURRENCY" -gt 32 ]; then
1211
CONCURRENCY=32
1312
fi
1413
else
1514
CONCURRENCY=$CELERY_WORKER_CONCURRENCY
1615
fi
1716

17+
export CELERY_WORKER_CONCURRENCY=$CONCURRENCY
1818
watchgod celery.__main__.main --args -A configuration.celery_app worker -l INFO -c $CONCURRENCY

backend/dps_training_k/game/tasks.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,14 @@ def check_for_updates():
3838
event_ids = list(claimed.values_list('id', flat=True))
3939
ScheduledEvent.objects.filter(id__in=event_ids).update(enqueued=True)
4040

41-
for event_id in event_ids:
42-
try:
43-
run_event.apply_async(args=[event_id]) # add event to celery queue for a worker to pick up
44-
except Exception as e:
45-
logging.error(f"failed to enqueue event {event_id}: {e}")
41+
if getattr(settings, 'CELERY_WORKER_CONCURRENCY', 0) == 1:
42+
# Single worker — execute inline to avoid async dispatch overhead
43+
for event_id in event_ids:
44+
run_event.apply(args=[event_id])
45+
else:
46+
# Multiple workers — dispatch for parallel processing
47+
for event_id in event_ids:
48+
try:
49+
run_event.apply_async(args=[event_id])
50+
except Exception as e:
51+
logging.error(f"failed to enqueue event {event_id}: {e}")

0 commit comments

Comments (0)