Commit 8105934

Modernise logging setup
1 parent c772dbc commit 8105934

9 files changed: +107 -264 lines changed
django_lightweight_queue/app_settings.py

Lines changed: 2 additions & 4 deletions

@@ -11,6 +11,8 @@ def setting(suffix, default):
 WORKERS = setting('WORKERS', {})
 BACKEND = setting('BACKEND', 'django_lightweight_queue.backends.synchronous.SynchronousBackend')
 
+LOGGER_FACTORY = setting('LOGGER_FACTORY', 'logging.getLogger')
+
 # Allow per-queue overrides of the backend.
 BACKEND_OVERRIDES = setting('BACKEND_OVERRIDES', {})
 
@@ -35,8 +37,4 @@ def setting(suffix, default):
 # Workers will export metrics on this port, and ports following it
 PROMETHEUS_START_PORT = setting('PROMETHEUS_START_PORT', 9300)
 
-ENABLE_FLUENT_LOGGING = setting('ENABLE_FLUENT_LOGGING', False)
-FLUENT_HOST = setting('FLUENT_HOST', '127.0.0.1')
-FLUENT_PORT = setting('FLUENT_PORT', 24224)
-
 ATOMIC_JOBS = setting('ATOMIC_JOBS', True)
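
The new LOGGER_FACTORY setting replaces the three Fluentd-specific settings with a single hook: a dotted path (or callable) that is given a logger name and returns the logger to use, defaulting to plain logging.getLogger. As a hedged illustration, assuming setting() applies the usual LIGHTWEIGHT_QUEUE_ prefix and that the factory is simply called with a name, a project could swap in its own factory like this (the myproject names are hypothetical):

```python
# settings.py -- sketch only; the LIGHTWEIGHT_QUEUE_ prefix is an assumption
# about the full name that app_settings.setting('LOGGER_FACTORY', ...) reads.
LIGHTWEIGHT_QUEUE_LOGGER_FACTORY = 'myproject.queue_logging.get_logger'
```

```python
# myproject/queue_logging.py -- hypothetical factory. It only namespaces the
# queue loggers so that a single entry in LOGGING covers master, cron and
# workers; anything exposing the usual debug/info/exception methods would do.
import logging


def get_logger(name):
    return logging.getLogger('myproject.queues.' + name)
```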

django_lightweight_queue/cron_scheduler.py

Lines changed: 26 additions & 29 deletions

@@ -1,50 +1,32 @@
 import re
 import imp
 import time
-import logging
 import datetime
 import threading
 
 from django.apps import apps
 from django.core.management import call_command
 
 from .task import task
-from .utils import get_backend, configure_logging, \
-    contribute_implied_queue_name
+from .utils import get_logger, get_backend, contribute_implied_queue_name
 
 CRON_QUEUE_NAME = 'cron_scheduler'
 
 
 class CronScheduler(threading.Thread):
-    def __init__(self, log_level, log_filename, config):
-        self.log_level = log_level
-        self.log_filename = log_filename
+    def __init__(self, config):
         self.config = config
-
-        # Logfiles must be opened in child process
-        self.log = None
-
+        self.logger = get_logger('dlq.cron')
         super(CronScheduler, self).__init__(daemon=True)
 
     def run(self):
-        self.log = logging.getLogger()
-        for x in self.log.handlers:
-            self.log.removeHandler(x)
-
-        configure_logging(
-            level=self.log_level,
-            format='%%(asctime)-15s %%(process)d %s %%(levelname).1s: '
-                   '%%(message)s' % (CRON_QUEUE_NAME,),
-            filename=self.log_filename,
-            extra={
-                'queue': CRON_QUEUE_NAME,
-            },
-        )
-
-        self.log.debug("Starting")
+        self.logger.debug("Starting cron thread")
 
         backend = get_backend(CRON_QUEUE_NAME)
-        self.log.info("Loaded backend %s", backend)
+        self.logger.info(
+            "Loaded backend {}".format(backend),
+            extra={'backend': backend},
+        )
 
         while True:
             # This will run until the process terminates.
@@ -55,7 +37,10 @@ def run(self):
             time.sleep((1 - time.time() % 1))
 
     def tick(self, backend):
-        self.log.debug("tick()")
+        self.logger.debug(
+            "Cron thread checking for work",
+            extra={'backend': backend},
+        )
 
         t = datetime.datetime.utcnow()
 
@@ -71,7 +56,13 @@ def tick(self, backend):
             ):
                 continue
 
-            self.log.info("Enqueueing %s", row['command'])
+            self.logger.debug(
+                "Enqueueing {}".format(row['command']),
+                extra={
+                    'target_queue': row['queue'],
+                    'command': row['command'],
+                },
+            )
 
             execute(
                 row['command'],
@@ -82,7 +73,13 @@ def tick(self, backend):
                 **row.get('command_kwargs', {})
            )
 
-            self.log.debug("Enqueued %s", row)
+            self.logger.info(
+                "Enqueued {}".format(row['command']),
+                extra={
+                    'target_queue': row['queue'],
+                    'command': row['command'],
+                },
+            )
 
 
 def get_cron_config():
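
The scheduler now obtains its logger through get_logger('dlq.cron') and passes structured context via the standard extra= mechanism, rather than stripping root-logger handlers and reconfiguring logging inside the thread. The implementation of utils.get_logger is not shown in this diff; a plausible sketch, assuming it merely resolves LOGGER_FACTORY with Django's import_string and calls it with the requested name:

```python
# Not from this commit -- a guess at the shape of utils.get_logger, assuming
# LOGGER_FACTORY may be either a dotted path or an already-imported callable.
from django.utils.module_loading import import_string

from . import app_settings


def get_logger(name):
    factory = app_settings.LOGGER_FACTORY
    if isinstance(factory, str):
        factory = import_string(factory)
    return factory(name)
```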

django_lightweight_queue/management/commands/queue_runner.py

Lines changed: 7 additions & 42 deletions

@@ -1,21 +1,17 @@
-import logging
 import daemonize
 
 from django.apps import apps
 from django.core.management.base import BaseCommand, CommandError
 
-from ...machine_types import PooledMachine, DirectlyConfiguredMachine
-from ...utils import get_backend, get_middleware, load_extra_config, \
-    configure_logging
+from ...utils import get_logger, get_backend, get_middleware, load_extra_config
 from ...runner import runner
+from ...machine_types import PooledMachine, DirectlyConfiguredMachine
 
 
 class Command(BaseCommand):
     def add_arguments(self, parser):
         parser.add_argument('--pidfile', action='store', dest='pidfile', default=None,
                             help="Fork and write pidfile to this file.")
-        parser.add_argument('--logfile', action='store', dest='logfile', default=None,
-                            help="Log to the specified file.")
         parser.add_argument('--touchfile', action='store', dest='touchfile', default=None,
                             help="touch(1) the specified file after running a job.")
         parser.add_argument('--machine', action='store', dest='machine_number', default=None,
@@ -65,59 +61,34 @@ def validate_and_normalise(self, options):
         )
 
     def handle(self, **options):
-        # Django < 1.8.3 leaves options['verbosity'] as a string so we cast to
-        # ensure an int.
-        verbosity = int(options['verbosity'])
-
-        level = {
-            0: logging.WARNING,
-            1: logging.INFO,
-            2: logging.DEBUG,
-        }[verbosity]
+        logger = get_logger('dlq.master')
 
         self.validate_and_normalise(options)
 
-        def log_filename(name):
-            try:
-                return options['logfile'] % name
-            except TypeError:
-                return options['logfile']
-
         def touch_filename(name):
             try:
                 return options['touchfile'] % name
             except TypeError:
                 return None
 
-        log_fd = configure_logging(
-            level=level,
-            format='%(asctime)-15s %(process)d %(levelname).1s: %(message)s',
-            filename=log_filename('master'),
-            extra={
-                'queue': 'master',
-            },
-        )
-
-        log = logging.getLogger()
-
         # Configuration overrides
         extra_config = options['config']
         if extra_config is not None:
             load_extra_config(extra_config)
 
-        log.info("Starting queue runner")
+        logger.info("Starting queue master")
 
         # Ensure children will be able to import our backend
         get_backend('dummy')
 
         get_middleware()
-        log.info("Loaded middleware")
+        logger.debug("Loaded middleware")
 
         # Ensure children will be able to import most things, but also try and
         # save memory by importing as much as possible before the fork() as it
         # has copy-on-write semantics.
         apps.get_models()
-        log.info("Loaded models")
+        logger.debug("Loaded models")
 
         if options['exact_configuration']:
             machine = DirectlyConfiguredMachine()
@@ -129,12 +100,7 @@ def touch_filename(name):
             )
 
         def run():
-            runner(
-                log,
-                log_filename,
-                touch_filename,
-                machine,
-            )
+            runner(touch_filename, machine, logger)
 
         # fork() only after we have started enough to catch failure, including
         # being able to write to our pidfile.
@@ -143,7 +109,6 @@ def run():
             app='queue_runner',
             pid=options['pidfile'],
             action=run,
-            keep_fds=[log_fd],
         )
         daemon.start()
 
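
With --logfile, the verbosity-to-level mapping and the configure_logging() call removed, where the master's output ends up is now purely a matter of standard logging configuration in the host project. A sketch of a Django LOGGING dict that captures the dlq.* loggers appearing in this commit ('dlq.master', 'dlq.cron'); the handler names, file path and level are placeholders:

```python
# settings.py -- illustrative only; adjust handler, path and level to taste.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'queue': {
            'format': '%(asctime)s %(process)d %(name)s %(levelname).1s: %(message)s',
        },
    },
    'handlers': {
        'queue_file': {
            'class': 'logging.FileHandler',
            'filename': '/var/log/queue-runner.log',
            'formatter': 'queue',
        },
    },
    'loggers': {
        # Parent of 'dlq.master' and 'dlq.cron'; raise to DEBUG to see the
        # "Loaded middleware" / "Loaded models" messages above.
        'dlq': {
            'handlers': ['queue_file'],
            'level': 'INFO',
        },
    },
}
```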

django_lightweight_queue/management/commands/queue_worker.py

Lines changed: 0 additions & 20 deletions

@@ -1,7 +1,3 @@
-import sys
-import logging
-import argparse
-
 from django.core.management.base import BaseCommand
 
 from ...worker import Worker
@@ -25,18 +21,6 @@ def add_arguments(self, parser):
             type=int,
             help="port number on which to run Prometheus",
         )
-        parser.add_argument(
-            '--log-level',
-            choices=[x.lower() for x in logging._nameToLevel.keys()],
-            default='warning',
-            help="log level to set",
-        )
-        parser.add_argument(
-            '--log-file',
-            type=str,
-            dest='log_filename',
-            help="log destination",
-        )
         parser.add_argument(
             '--touch-file',
             type=str,
@@ -50,17 +34,13 @@ def handle(
         queue,
         number,
         prometheus_port,
-        log_level,
-        log_filename,
         touch_filename,
         **options
     ):
         worker = Worker(
             queue=queue,
             worker_num=number,
             prometheus_port=prometheus_port,
-            log_level=logging._nameToLevel[log_level.upper()],
-            log_filename=log_filename,
             touch_filename=touch_filename,
         )
         worker.run()
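
The worker likewise loses its --log-level and --log-file flags, so neither a per-worker log file nor the old WARNING default is imposed by the command line any more; level and destination come from whatever logging configuration (or LOGGER_FACTORY) the project provides. For ad-hoc debugging, something along these lines would work, assuming worker loggers also live under the dlq hierarchy (their exact names are not shown in this diff):

```python
# Ad-hoc, assumption-laden example: raise the dlq.* loggers to DEBUG.
import logging

logging.getLogger('dlq').setLevel(logging.DEBUG)
```
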
Lines changed: 8 additions & 32 deletions

@@ -1,43 +1,19 @@
-import json
 import logging
-import traceback
 
-log = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)
 
-class LoggingMiddleware(object):
-    def process_job(self, job):
-        log.info("Running job %s", job)
 
-        self.fluent_log(job, state='processing')
+class LoggingMiddleware:
+    def process_job(self, job, queue, worker_num):
+        logger.info("Running job {}".format(job))
 
     def process_result(self, job, result, duration):
-        log.info("Finished job => %r (Time taken: %.2fs)",
+        logger.info("Finished job => {!r} (Time taken: {:.2f}s)".format(
             result,
             duration,
-        )
-
-        self.fluent_log(job, state='finished', duration=duration)
+        ))
 
     def process_exception(self, job, duration, *exc_info):
-        exception = ''.join(traceback.format_exception(*exc_info))
-
-        log.error("Exception when processing job (duration: %.2fs): %s",
+        logger.exception("Exception when processing job (duration: {:.2f}s)".format(
             duration,
-            exception,
-        )
-
-        self.fluent_log(
-            job,
-            state='exception',
-            duration=duration,
-            exception=exception,
-        )
-
-    def fluent_log(self, job, **kwargs):
-        data = job.as_dict()
-
-        data.update(kwargs)
-
-        data['fluent_log'] = True
-
-        log.info(json.dumps(data))
+        ))
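
The middleware no longer serialises job state to JSON itself: fluent_log() is gone, and structured fields now travel as extra= dicts on ordinary log calls (see the cron scheduler changes above), so shipping them to Fluentd or any other collector becomes a handler/formatter concern. A hedged sketch, not part of this commit, of a formatter that re-emits a record plus its extra= fields as JSON:

```python
# Hypothetical formatter. Attributes passed via logging's extra= end up on
# the LogRecord, so a formatter can pick the structured fields back off the
# record and emit JSON for a log shipper to collect.
import json
import logging


class JSONFormatter(logging.Formatter):
    # Attributes every LogRecord has, plus the two set lazily by formatters.
    STANDARD = set(vars(logging.LogRecord('', 0, '', 0, '', (), None))) | {'message', 'asctime'}

    def format(self, record):
        payload = {
            'logger': record.name,
            'level': record.levelname,
            'message': record.getMessage(),
        }
        # Anything beyond the stock attributes was supplied through extra=.
        for key, value in vars(record).items():
            if key not in self.STANDARD:
                payload[key] = str(value)
        return json.dumps(payload)
```

Attaching this to the handler used by the dlq loggers would restore machine-readable output without the middleware needing to know anything about the transport.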
