
Commit c657fce

Revert "Allow turning on JSON Detection in StackDriver" (#6352)
This reverts commit ef038bf. (PR #6293)
1 parent ef038bf commit c657fce
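
In practice, the reverted change (PR #6293) let callers of the background-thread transport drop the python_logger field from jsonPayload so that Stackdriver could apply JSON detection to the message text; this commit removes that option again. A minimal sketch of what the revert means for constructor calls (the client setup and the 'example_log' name are illustrative, not taken from the commit):

    import google.cloud.logging
    from google.cloud.logging.handlers.transports.background_thread import (
        BackgroundThreadTransport)

    # Assumes Application Default Credentials are configured; the logger
    # name 'example_log' is illustrative.
    client = google.cloud.logging.Client()

    # Post-revert, only the original signature is accepted:
    transport = BackgroundThreadTransport(client, 'example_log')

    # The keyword added by PR #6293 is gone again, so this now raises
    # TypeError:
    #   BackgroundThreadTransport(client, 'example_log',
    #                             include_logger_name=False)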

2 files changed: +6, -45 lines
google/cloud/logging/handlers/transports/background_thread.py

Lines changed: 6 additions & 23 deletions
@@ -92,18 +92,12 @@ class _Worker(object):
         than the grace_period. This means this is effectively the longest
         amount of time the background thread will hold onto log entries
         before sending them to the server.
-
-    :type includer_logger_name: bool
-    :param include_logger_name: (optional) Include python_logger field in
-        jsonPayload. Turn this off to enable json detection in log messages.
     """
 
     def __init__(self, cloud_logger, grace_period=_DEFAULT_GRACE_PERIOD,
                  max_batch_size=_DEFAULT_MAX_BATCH_SIZE,
-                 max_latency=_DEFAULT_MAX_LATENCY,
-                 include_logger_name=True):
+                 max_latency=_DEFAULT_MAX_LATENCY):
         self._cloud_logger = cloud_logger
-        self._include_logger_name = include_logger_name
         self._grace_period = grace_period
         self._max_batch_size = max_batch_size
         self._max_latency = max_latency
@@ -259,21 +253,17 @@ def enqueue(self, record, message, resource=None, labels=None,
         :param span_id: (optional) span_id within the trace for the log entry.
             Specify the trace parameter if span_id is set.
         """
-
-        log_record = {
+        self._queue.put_nowait({
             'info': {
                 'message': message,
+                'python_logger': record.name,
             },
             'severity': record.levelname,
             'resource': resource,
             'labels': labels,
             'trace': trace,
             'span_id': span_id,
-        }
-
-        if self._include_logger_name:
-            log_record['info']['python_logger'] = record.name
-        self._queue.put_nowait(log_record)
+        })
 
     def flush(self):
         """Submit any pending log records."""
@@ -303,24 +293,17 @@ class BackgroundThreadTransport(Transport):
         than the grace_period. This means this is effectively the longest
         amount of time the background thread will hold onto log entries
         before sending them to the server.
-
-    :type includer_logger_name: bool
-    :param include_logger_name: (optional) Include python_logger field in
-        jsonPayload. Turn this off to enable json
-        detection in log messages.
     """
 
     def __init__(self, client, name, grace_period=_DEFAULT_GRACE_PERIOD,
                  batch_size=_DEFAULT_MAX_BATCH_SIZE,
-                 max_latency=_DEFAULT_MAX_LATENCY,
-                 include_logger_name=True):
+                 max_latency=_DEFAULT_MAX_LATENCY):
         self.client = client
         logger = self.client.logger(name)
         self.worker = _Worker(logger,
                               grace_period=grace_period,
                               max_batch_size=batch_size,
-                              max_latency=max_latency,
-                              include_logger_name=include_logger_name)
+                              max_latency=max_latency)
         self.worker.start()
 
     def send(self, record, message, resource=None, labels=None,
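
With the revert applied, enqueue() always attaches the python_logger field again, so every entry placed on the worker queue has the shape sketched below. The values shown are made up for illustration; only the keys come from the diff above:

    # Shape of the dict the worker enqueues after this revert; the message,
    # logger name, and severity are illustrative, not taken from the commit.
    queued_entry = {
        'info': {
            'message': 'payment processed',
            'python_logger': 'myapp.billing',  # always present again
        },
        'severity': 'INFO',
        'resource': None,
        'labels': None,
        'trace': None,
        'span_id': None,
    }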

tests/unit/handlers/transports/test_background_thread.py

Lines changed: 0 additions & 22 deletions
@@ -175,7 +175,6 @@ def test_constructor(self):
         self.assertEqual(worker._grace_period, grace_period)
         self.assertEqual(worker._max_batch_size, max_batch_size)
         self.assertEqual(worker._max_latency, max_latency)
-        self.assertTrue(worker._include_logger_name)
         self.assertFalse(worker.is_alive)
         self.assertIsNone(worker._thread)
 
@@ -283,23 +282,6 @@ def test__thread_main(self):
         self.assertEqual(worker._cloud_logger._batch.commit_count, 2)
         self.assertEqual(worker._queue.qsize(), 0)
 
-    def test__thread_main_no_python_logger(self):
-        from google.cloud.logging.handlers.transports import background_thread
-
-        worker = self._make_one(_Logger(self.NAME), include_logger_name=False)
-        self.assertFalse(worker._include_logger_name)
-
-        # Enqueue one record and the termination signal.
-        self._enqueue_record(worker, '1')
-        worker._queue.put_nowait(background_thread._WORKER_TERMINATOR)
-
-        worker._thread_main()
-
-        self.assertEqual(len(worker._cloud_logger._batch.all_entries), 1)
-        self.assertFalse(
-            'python_logger' in worker._cloud_logger._batch.all_entries[0]
-        )
-
     def test__thread_main_error(self):
         from google.cloud.logging.handlers.transports import background_thread
 
@@ -439,12 +421,9 @@ def join(self, timeout=None):
 class _Batch(object):
 
     def __init__(self):
-        # Entries waiting to be committed
         self.entries = []
         self.commit_called = False
         self.commit_count = None
-        # All entries ever committed via this _Batch
-        self.all_entries = []
 
     def log_struct(
             self, info, severity=logging.INFO, resource=None, labels=None,
@@ -457,7 +436,6 @@ def log_struct(
         self.log_struct_called_with = (info, severity, resource, labels,
                                        trace, span_id)
         self.entries.append(info)
-        self.all_entries.append(info)
 
     def commit(self):
         self.commit_called = True
