Skip to content

Commit c5e4b7e

Browse files
authored
Remove Max Dequeue Limit (#616)
Scale testing revealed this limit is unnecessary and confusing. In practice, we recommend setting queue concurrency limits if tasks are resource-intensive.
1 parent 31930bf commit c5e4b7e

File tree

1 file changed

+4
-2
lines changed

1 file changed

+4
-2
lines changed

dbos/_sys_db.py

Lines changed: 4 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,6 +3,7 @@
33
import functools
44
import json
55
import random
6+
import sys
67
import threading
78
import time
89
from abc import ABC, abstractmethod
@@ -2513,7 +2514,7 @@ def start_queued_workflows(
25132514
return []
25142515

25152516
# Compute max_tasks, the number of workflows that can be dequeued given local and global concurrency limits,
2516-
max_tasks = 100 # To minimize contention with large queues, never dequeue more than 100 tasks
2517+
max_tasks = sys.maxsize
25172518
if queue.worker_concurrency is not None or queue.concurrency is not None:
25182519
# Count how many workflows on this queue are currently PENDING both locally and globally.
25192520
pending_tasks_query = (
@@ -2597,7 +2598,8 @@ def start_queued_workflows(
25972598
)
25982599
else:
25992600
query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
2600-
query = query.limit(int(max_tasks))
2601+
if max_tasks != sys.maxsize:
2602+
query = query.limit(int(max_tasks))
26012603

26022604
rows = c.execute(query).fetchall()
26032605

0 commit comments

Comments (0)