@@ -7,12 +7,13 @@ logfile=/var/log/supervisord.log
 # Cannot place this in Celery for now because Celery must run as a single process (see note below)
 # Indexing uses multi-processing to speed things up
 [program:document_indexing]
-environment=CURRENT_PROCESS_IS_AN_INDEXING_JOB=true,LOG_FILE_NAME=document_indexing
+environment=CURRENT_PROCESS_IS_AN_INDEXING_JOB=true
 command=python danswer/background/update.py
+stdout_logfile=/var/log/document_indexing.log
+stdout_logfile_maxbytes=16MB
 redirect_stderr=true
 autorestart=true
 
-
 # Background jobs that must be run async due to long time to completion
 # NOTE: due to an issue with Celery + SQLAlchemy
 # (https://github.com/celery/celery/issues/7007#issuecomment-1740139367)
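For orientation, this is how the [program:document_indexing] block reads once the hunk above is applied (a sketch assembled from the added and unchanged lines shown, not part of the patch itself): indexing output now goes to a supervisord-managed, size-rotated log file instead of being routed through the LOG_FILE_NAME environment variable.

    # Sketch of the resulting block (assembled from the hunk above)
    [program:document_indexing]
    environment=CURRENT_PROCESS_IS_AN_INDEXING_JOB=true
    command=python danswer/background/update.py
    # supervisord writes and rotates the log itself; 16MB cap per file
    stdout_logfile=/var/log/document_indexing.log
    stdout_logfile_maxbytes=16MB
    redirect_stderr=true
    autorestart=true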
@@ -84,7 +85,8 @@ stopasgroup=true
 # More details on setup here: https://docs.danswer.dev/slack_bot_setup
 [program:slack_bot]
 command=python danswer/danswerbot/slack/listener.py
-environment=LOG_FILE_NAME=slack_bot
+stdout_logfile=/var/log/slack_bot.log
+stdout_logfile_maxbytes=16MB
 redirect_stderr=true
 autorestart=true
 startretries=5
@@ -98,8 +100,8 @@ command=tail -qF
     /var/log/celery_worker_primary.log
     /var/log/celery_worker_light.log
     /var/log/celery_worker_heavy.log
-    /var/log/document_indexing_info.log
-    /var/log/slack_bot_debug.log
+    /var/log/document_indexing.log
+    /var/log/slack_bot.log
 stdout_logfile=/dev/stdout
 stdout_logfile_maxbytes=0  # must be set to 0 when stdout_logfile=/dev/stdout
 autorestart=true
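Taken together, the change standardizes on one logging pattern: each long-running program writes to its own size-rotated stdout_logfile under /var/log, and the tail-based collector in the last hunk streams all of those files to the container's stdout. Below is a minimal sketch of the collector side, assuming a section name of log_collector (the real section name is not visible in this diff; only its command and stdout settings are).

    # Collector sketch: the section name is an assumption, the settings mirror the hunk above
    [program:log_collector]
    command=tail -qF
        /var/log/celery_worker_primary.log
        /var/log/celery_worker_light.log
        /var/log/celery_worker_heavy.log
        /var/log/document_indexing.log
        /var/log/slack_bot.log
    stdout_logfile=/dev/stdout
    stdout_logfile_maxbytes=0  # must be 0 when stdout_logfile=/dev/stdout
    autorestart=true

Leaving stdout_logfile_maxbytes at 0 for the collector disables supervisord's rotation for that stream, which is required when writing to a non-seekable target like /dev/stdout; rotation instead happens per program via the 16MB caps set above.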