
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor

-from tornado import web, httpserver, ioloop, log
+from tornado import web, httpserver, ioloop
+from tornado.log import access_log, app_log, LogFormatter
+from tornado.curl_httpclient import curl_log

import tornado.options
from tornado.options import define, options
@@ -50,8 +52,6 @@
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
-access_log = log.access_log
-app_log = log.app_log

here = os.path.dirname(__file__)
pjoin = os.path.join
@@ -79,6 +79,17 @@ class NBViewer(Application):

    name = Unicode('nbviewer')

+    aliases = Dict({
+        'log-level': 'Application.log_level',
+    })
+
+    flags = Dict({
+        'debug': (
+            {'Application': {'log_level': logging.DEBUG}},
+            "Set log-level to debug, for the most verbose logging."
+        ),
+    })
+
    # Use this to insert custom configuration of handlers for NBViewer extensions
    handler_settings = Dict().tag(config=True)

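A note on the aliases and flags added above: this is standard traitlets Application machinery, where an alias maps a command-line option onto a configurable trait and a flag applies a whole config fragment at once. A minimal standalone sketch of the same pattern (the toy class, its port trait, and the example invocation are illustrative assumptions, not part of nbviewer):

import logging

from traitlets import Integer
from traitlets.config import Application


class ToyApp(Application):
    name = 'toyapp'
    # hypothetical configurable trait, for illustration only
    port = Integer(5000).tag(config=True)

    aliases = {
        'log-level': 'Application.log_level',  # --log-level=DEBUG
        'port': 'ToyApp.port',                 # --port=8080
    }
    flags = {
        'debug': (
            {'Application': {'log_level': logging.DEBUG}},
            "Set log-level to debug, for the most verbose logging."
        ),
    }

    def start(self):
        # shown when run with --debug or --log-level=INFO
        self.log.info("port=%s, log_level=%s", self.port, self.log_level)


if __name__ == '__main__':
    ToyApp.launch_instance()  # e.g. python toyapp.py --debug --port=8080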
@@ -97,20 +108,20 @@ class NBViewer(Application):
    client = Any().tag(config=True)
    @default('client')
    def _default_client(self):
-        client = HTTPClientClass()
+        client = HTTPClientClass(log=self.log)
        client.cache = self.cache
        return client

    index = Any().tag(config=True)
    @default('index')
    def _load_index(self):
        if os.environ.get('NBINDEX_PORT'):
-            log.app_log.info("Indexing notebooks")
+            self.log.info("Indexing notebooks")
            tcp_index = os.environ.get('NBINDEX_PORT')
            index_url = tcp_index.split('tcp://')[1]
            index_host, index_port = index_url.split(":")
        else:
-            log.app_log.info("Not indexing notebooks")
+            self.log.info("Not indexing notebooks")
            indexer = NoSearch()
        return indexer

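The NBINDEX_PORT branch above relies on the address format that Docker container links inject, e.g. tcp://<ip>:<port>. A tiny standalone sketch of that parsing, with an illustrative address in place of the environment variable:

# illustrative value; in nbviewer it comes from os.environ.get('NBINDEX_PORT')
tcp_index = "tcp://172.17.0.2:9200"

index_url = tcp_index.split('tcp://')[1]        # "172.17.0.2:9200"
index_host, index_port = index_url.split(":")   # ("172.17.0.2", "9200")

assert (index_host, index_port) == ("172.17.0.2", "9200")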
@@ -149,7 +160,7 @@ def cache(self):
            tcp_memcache = os.environ.get('NBCACHE_PORT')
            memcache_urls = tcp_memcache.split('tcp://')[1]
        if options.no_cache:
-            log.app_log.info("Not using cache")
+            self.log.info("Not using cache")
            cache = MockCache()
        elif pylibmc and memcache_urls:
            # setup memcache
@@ -161,13 +172,13 @@ def cache(self):
                kwargs['binary'] = True
                kwargs['username'] = username
                kwargs['password'] = password
-                log.app_log.info("Using SASL memcache")
+                self.log.info("Using SASL memcache")
            else:
-                log.app_log.info("Using plain memcache")
+                self.log.info("Using plain memcache")

            cache = AsyncMultipartMemcache(memcache_urls.split(','), **kwargs)
        else:
-            log.app_log.info("Using in-memory cache")
+            self.log.info("Using in-memory cache")
            cache = DummyAsyncCache()

        return cache
@@ -179,7 +190,7 @@ def env(self):
        try:
            git_data = git_info(here)
        except Exception as e:
-            app_log.error("Failed to get git info: %s", e)
+            self.log.error("Failed to get git info: %s", e)
            git_data = {}
        else:
            git_data['msg'] = escape(git_data['msg'])
@@ -196,18 +207,18 @@ def fetch_kwargs(self):
        fetch_kwargs = dict(connect_timeout=10,)
        if options.proxy_host:
            fetch_kwargs.update(proxy_host=options.proxy_host, proxy_port=options.proxy_port)
-            log.app_log.info("Using web proxy {proxy_host}:{proxy_port}."
+            self.log.info("Using web proxy {proxy_host}:{proxy_port}."
                             "".format(**fetch_kwargs))

        if options.no_check_certificate:
            fetch_kwargs.update(validate_cert=False)
-            log.app_log.info("Not validating SSL certificates")
+            self.log.info("Not validating SSL certificates")

        return fetch_kwargs

    @cached_property
    def formats(self):
-        return self.configure_formats(log.app_log)
+        return self.configure_formats()

    # load frontpage sections
    @cached_property
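For context on fetch_kwargs: connect_timeout, proxy_host/proxy_port, and validate_cert are standard tornado HTTPRequest arguments that AsyncHTTPClient.fetch() forwards, and the proxy options are only honored by the curl-based client. A hedged standalone sketch, not nbviewer's code, with an illustrative URL:

from tornado.httpclient import AsyncHTTPClient


async def fetch_example():
    # roughly the shape of the fetch_kwargs dict built above
    fetch_kwargs = dict(connect_timeout=10, validate_cert=False)
    client = AsyncHTTPClient()
    response = await client.fetch("https://example.com/notebook.ipynb", **fetch_kwargs)
    return response.body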
@@ -223,6 +234,29 @@ def frontpage_setup(self):
        }
        return frontpage_setup

+    # Attribute inherited from traitlets.config.Application, automatically used to style logs
+    # https://github.com/ipython/traitlets/blob/master/traitlets/config/application.py#L191
+    _log_formatter_cls = LogFormatter
+    # Need Tornado LogFormatter for color logs, keys 'color' and 'end_color' in log_format
+
+    # Observed traitlet inherited again from traitlets.config.Application
+    # https://github.com/ipython/traitlets/blob/master/traitlets/config/application.py#L177
+    @default('log_level')
+    def _log_level_default(self):
+        return logging.INFO
+
+    # Ditto the above: https://github.com/ipython/traitlets/blob/master/traitlets/config/application.py#L197
+    @default('log_format')
+    def _log_format_default(self):
+        """override default log format to include time and color, plus always display the log level, not just when it's high"""
+        return "%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s"
+
+    # For consistency with JupyterHub logs
+    @default('log_datefmt')
+    def _log_datefmt_default(self):
+        """Exclude date from default date format"""
+        return "%Y-%m-%d %H:%M:%S"
+
    @cached_property
    def pool(self):
        if self.processes:
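Outside the traitlets machinery, the effect of _log_formatter_cls, log_format, and log_datefmt can be reproduced with plain logging. This standalone sketch (the logger name and message are illustrative) shows why tornado's LogFormatter is needed: it is what supplies the color and end_color keys used in the format string:

import logging

from tornado.log import LogFormatter

fmt = ("%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d "
       "%(name)s %(module)s:%(lineno)d]%(end_color)s %(message)s")
datefmt = "%Y-%m-%d %H:%M:%S"

handler = logging.StreamHandler()
handler.setFormatter(LogFormatter(fmt=fmt, datefmt=datefmt))

log = logging.getLogger("nbviewer-demo")
log.setLevel(logging.INFO)
log.addHandler(handler)

# prints something like: [I 2024-01-01 12:00:00.000 nbviewer-demo demo:16] hello
log.info("hello")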
@@ -240,23 +274,25 @@ def rate_limiter(self):
    def static_paths(self):
        default_static_path = pjoin(here, 'static')
        if self.static_path:
-            log.app_log.info("Using custom static path {}".format(self.static_path))
+            self.log.info("Using custom static path {}".format(self.static_path))
            static_paths = [self.static_path, default_static_path]
        else:
            static_paths = [default_static_path]
+
        return static_paths

    @cached_property
    def template_paths(self):
        default_template_path = pjoin(here, 'templates')
        if options.template_path is not None:
-            log.app_log.info("Using custom template path {}".format(options.template_path))
+            self.log.info("Using custom template path {}".format(options.template_path))
            template_paths = [options.template_path, default_template_path]
        else:
            template_paths = [default_template_path]
+
        return template_paths

-    def configure_formats(self, log, formats=None):
+    def configure_formats(self, formats=None):
        """
        Format-specific configuration.
        """
@@ -277,8 +313,8 @@ def configure_formats(self, log, formats=None):
                # can't pickle exporter instances,
                formats[key]["exporter"] = exporter_cls
            else:
-                formats[key]["exporter"] = exporter_cls(config=self.config, log=log)
-
+                formats[key]["exporter"] = exporter_cls(config=self.config, log=self.log)
+
        return formats

    def init_tornado_application(self):
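The exporter_cls(config=self.config, log=self.log) call above works because nbconvert exporters are traitlets Configurables, so config and log can be passed as keyword arguments. A hedged standalone sketch (assumes nbconvert is installed; the notebook filename is illustrative):

import logging

from nbconvert.exporters import HTMLExporter
from traitlets.config import Config

config = Config()
exporter = HTMLExporter(config=config, log=logging.getLogger("nbviewer-demo"))

# body, resources = exporter.from_filename("notebook.ipynb")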
@@ -305,8 +341,7 @@ def init_tornado_application(self):

        # DEBUG env implies both autoreload and log-level
        if os.environ.get("DEBUG"):
-            options.debug = True
-            logging.getLogger().setLevel(logging.DEBUG)
+            self.log.setLevel(logging.DEBUG)

        # input traitlets to settings
        settings = dict(
@@ -337,6 +372,7 @@ def init_tornado_application(self):
            localfile_any_user=options.localfile_any_user,
            localfile_follow_symlinks=options.localfile_follow_symlinks,
            localfile_path=os.path.abspath(options.localfiles),
+            log=self.log,
            log_function=log_request,
            mathjax_url=options.mathjax_url,
            max_cache_uris=self.max_cache_uris,
@@ -355,10 +391,35 @@ def init_tornado_application(self):
        )

        if options.localfiles:
-            log.app_log.warning("Serving local notebooks in %s, this can be a security risk", options.localfiles)
-
+            self.log.warning("Serving local notebooks in %s, this can be a security risk", options.localfiles)
+
        # create the app
-        self.tornado_application = web.Application(handlers, debug=options.debug, **settings)
+        self.tornado_application = web.Application(handlers, **settings)
+
+    def init_logging(self):
+
+        # Note that we inherit a self.log attribute from traitlets.config.Application
+        # https://github.com/ipython/traitlets/blob/master/traitlets/config/application.py#L209
+        # as well as a log_level attribute
+        # https://github.com/ipython/traitlets/blob/master/traitlets/config/application.py#L177
+
+        # This prevents double log messages because tornado uses a root logger that
+        # self.log is a child of. The logging module dispatches log messages to a log
+        # and all of its ancestors until propagate is set to False.
+        self.log.propagate = False
+
+        tornado_log = logging.getLogger('tornado')
+        # hook up tornado's loggers to our app handlers
+        for log in (app_log, access_log, tornado_log, curl_log):
+            # ensure all log statements identify the application they come from
+            log.name = self.log.name
+            log.parent = self.log
+            log.propagate = True
+            log.setLevel(self.log_level)
+
+        # disable curl debug, which logs all headers, info for upstream requests, which is TOO MUCH
+        curl_log.setLevel(
+            max(self.log_level, logging.INFO))

    # Mostly copied from JupyterHub because if it isn't broken then don't fix it
    def write_config_file(self):
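The re-parenting trick in init_logging is plain logging-module mechanics: a record is handled by the logger it was emitted on and then by each ancestor in turn, stopping once a logger has propagate set to False. A minimal standalone sketch (logger names are illustrative, not nbviewer's):

import logging
import sys

app_logger = logging.getLogger("myapp")
app_logger.setLevel(logging.INFO)
app_logger.addHandler(logging.StreamHandler(sys.stderr))
app_logger.propagate = False    # keep records from also reaching the root logger

lib_logger = logging.getLogger("somelib")
lib_logger.parent = app_logger  # re-parent so records bubble up into app_logger
lib_logger.propagate = True     # propagation walks the parent chain
lib_logger.setLevel(logging.INFO)

lib_logger.info("handled exactly once, by myapp's handler")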
@@ -401,6 +462,7 @@ def __init__(self, *args, **kwargs):

        # Inherited method from traitlets.Application
        self.load_config_file(options.config_file)
+        self.init_logging()
        self.init_tornado_application()

def init_options():
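load_config_file uses traitlets' standard config-file format, so nbviewer_config.py (the default filename defined just below) can set any configurable trait. A hypothetical example of its contents; the particular values are illustrative assumptions:

# nbviewer_config.py -- hypothetical contents, loaded by self.load_config_file() above
c = get_config()  # noqa: F821 -- injected by traitlets when this file is loaded

# same effect as the --log-level alias on the command line
c.Application.log_level = 'DEBUG'

# NBViewer's own configurable traits can be set the same way,
# e.g. the handler_settings Dict shown earlier in this diff
c.NBViewer.handler_settings = {}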
@@ -426,7 +488,7 @@ def default_endpoint():
    define("cache_expiry_min", default=10*60, help="minimum cache expiry (seconds)", type=int)
    define("config_file", default='nbviewer_config.py', help="The config file to load", type=str)
    define("content_security_policy", default="connect-src 'none';", help="Content-Security-Policy header setting", type=str)
-    define("debug", default=False, help="run in debug mode", type=bool)
+    # define("debug", default=False, help="run in debug mode", type=bool)
    define("default_format", default="html", help="format to use for legacy / URLs", type=str)
    define("frontpage", default=FRONTPAGE_JSON, help="path to json file containing frontpage content", type=str)
    define("generate_config", default=False, help="Generate default config file and then stop.", type=bool)
@@ -462,16 +524,6 @@ def main(argv=None):
    init_options()
    tornado.options.parse_command_line(argv)

-    try:
-        from tornado.curl_httpclient import curl_log
-    except ImportError as e:
-        log.app_log.warning("Failed to import curl: %s", e)
-    else:
-        # debug-level curl_log logs all headers, info for upstream requests,
-        # which is just too much.
-        curl_log.setLevel(max(log.app_log.getEffectiveLevel(), logging.INFO))
-
-
    # create and start the app
    nbviewer = NBViewer()
    app = nbviewer.tornado_application
@@ -485,7 +537,7 @@ def main(argv=None):
        }

    http_server = httpserver.HTTPServer(app, xheaders=True, ssl_options=ssl_options)
-    log.app_log.info("Listening on %s:%i, path %s", options.host, options.port,
+    nbviewer.log.info("Listening on %s:%i, path %s", options.host, options.port,
                     app.settings['base_url'])
    http_server.listen(options.port, options.host)
    ioloop.IOLoop.current().start()