Commit 2d4c038

chore(ci): enabling partial flushing by default (#4041)
## Description

Enabling payload partial flush by default.

Resolves #1632

## Checklist

- [x] Title must conform to [conventional commit](https://github.com/conventional-changelog/commitlint/tree/master/%40commitlint/config-conventional).
- [x] Add additional sections for `feat` and `fix` pull requests.
- [x] Ensure tests are passing for affected code.
- [x] [Library documentation](https://github.com/DataDog/dd-trace-py/tree/1.x/docs) and/or [Datadog's documentation site](https://github.com/DataDog/documentation/) is updated. Link to doc PR in description.

## Reviewer Checklist

- [ ] Title is accurate.
- [ ] Description motivates each change.
- [ ] No unnecessary changes were introduced in this PR.
- [ ] PR cannot be broken up into smaller PRs.
- [ ] Avoid breaking [API](https://ddtrace.readthedocs.io/en/stable/versioning.html#interfaces) changes unless absolutely necessary.
- [ ] Tests provided or description of manual testing performed is included in the code or PR.
- [ ] Release note has been added for fixes and features, or else `changelog/no-changelog` label added.
- [ ] All relevant GitHub issues are correctly linked.
- [ ] Backports are identified and tagged with Mergifyio.
- [ ] Add to milestone.
1 parent 7a95e9b · commit 2d4c038

File tree: 5 files changed (+82, -5 lines)
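In practical terms, this commit flips the tracer default so partial flushing is on unless explicitly turned off. A minimal sketch of opting back out via the environment variable (the variable name and where it is read come from the `ddtrace/tracer.py` diff below; the snippet itself is illustrative and not part of this commit):

```python
import os

# Opt out of the new default before the tracer is constructed.
os.environ["DD_TRACE_PARTIAL_FLUSH_ENABLED"] = "false"

from ddtrace import Tracer  # noqa: E402

t = Tracer()
# The tracer reads the variable in __init__ (see the tracer.py diff below),
# so this freshly constructed instance has partial flushing disabled.
assert t._partial_flush_enabled is False
```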

ddtrace/tracer.py

Lines changed: 1 addition & 1 deletion
@@ -215,7 +215,7 @@ def __init__(
         )
         self._single_span_sampling_rules = get_span_sampling_rules()  # type: List[SpanSamplingRule]
         self._writer = writer  # type: TraceWriter
-        self._partial_flush_enabled = asbool(os.getenv("DD_TRACE_PARTIAL_FLUSH_ENABLED", default=False))
+        self._partial_flush_enabled = asbool(os.getenv("DD_TRACE_PARTIAL_FLUSH_ENABLED", default=True))
         self._partial_flush_min_spans = int(os.getenv("DD_TRACE_PARTIAL_FLUSH_MIN_SPANS", default=500))
         self._appsec_enabled = config._appsec_enabled

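Besides the environment variable, the tests added in this commit toggle the behavior per tracer instance through `Tracer.configure`. A hedged sketch of that pattern, mirroring the calls in the updated integration tests rather than an officially documented recipe:

```python
from ddtrace import Tracer

# Build a tracer and switch partial flushing off for this instance only,
# as the new test_metrics_partial_flush_disabled test does.
t = Tracer()
t.configure(partial_flush_enabled=False)
```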
docs/configuration.rst

Lines changed: 6 additions & 0 deletions
@@ -233,6 +233,12 @@ below:
      - 512
      - The maximum length of ``x-datadog-tags`` header allowed in the Datadog propagation style. Must be a value between 0 to 512. If 0, propagation of ``x-datadog-tags`` is disabled.

+   .. _dd-trace-partial-flush-enabled:
+   * - ``DD_TRACE_PARTIAL_FLUSH_ENABLED``
+     - Boolean
+     - True
+     - Prevents large payloads being sent to APM.
+
    .. _dd-profiling-enabled:
    * - ``DD_PROFILING_ENABLED``
      - Boolean

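The enable flag documented above works together with `DD_TRACE_PARTIAL_FLUSH_MIN_SPANS` (default 500, read in the `ddtrace/tracer.py` diff above). A small illustrative sketch, assuming both variables are set before the tracer is created; the value 100 is an example, not a recommended setting:

```python
import os

# Keep partial flushing on but flush in smaller batches of 100 finished spans.
os.environ["DD_TRACE_PARTIAL_FLUSH_ENABLED"] = "true"
os.environ["DD_TRACE_PARTIAL_FLUSH_MIN_SPANS"] = "100"

from ddtrace import Tracer  # noqa: E402

t = Tracer()
assert t._partial_flush_enabled is True
assert t._partial_flush_min_spans == 100
```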
tests/commands/test_runner.py

Lines changed: 1 addition & 1 deletion
@@ -410,7 +410,7 @@ def test_info_no_configs():
     Log injection enabled: False
     Health metrics enabled: False
     Priority sampling enabled: True
-    Partial flushing enabled: False
+    Partial flushing enabled: True
     Partial flush minimum number of spans: 500
     \x1b[92m\x1b[1mTagging:\x1b[0m
     DD Service: None

tests/contrib/asyncio/test_tracer_safety.py

Lines changed: 3 additions & 2 deletions
@@ -45,11 +45,12 @@ async def coro():
         with tracer.trace("coroutine"):
             await asyncio.sleep(0.01)

-    futures = [asyncio.ensure_future(coro()) for x in range(1000)]
+    # partial flushing is enabled, ensure the number of spans generated is less than 500
+    futures = [asyncio.ensure_future(coro()) for x in range(400)]
     for future in futures:
         await future

     # the trace is wrong but the Context is finished
     traces = tracer.pop_traces()
     assert 1 == len(traces)
-    assert 1000 == len(traces[0])
+    assert 400 == len(traces[0])

tests/integration/test_integration.py

Lines changed: 71 additions & 1 deletion
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 import itertools
 import logging
 import os
@@ -219,6 +220,47 @@ def test_metrics(encoding, monkeypatch):

     with override_global_config(dict(health_metrics_enabled=True)):
         t = Tracer()
+        assert t._partial_flush_min_spans == 500
+        statsd_mock = mock.Mock()
+        t._writer.dogstatsd = statsd_mock
+        assert t._writer._report_metrics
+        with mock.patch("ddtrace.internal.writer.log") as log:
+            for _ in range(5):
+                spans = []
+                for i in range(3000):
+                    spans.append(t.trace("op"))
+                # Since _partial_flush_min_spans is set to 500 we will flush spans in 6 batches
+                # each batch will contain 500 spans
+                for s in spans:
+                    s.finish()
+
+            t.shutdown()
+            log.warning.assert_not_called()
+            log.error.assert_not_called()
+
+        statsd_mock.distribution.assert_has_calls(
+            [
+                mock.call("datadog.tracer.http.sent.bytes", AnyInt()),
+                mock.call("datadog.tracer.http.sent.traces", 30),
+                mock.call("datadog.tracer.writer.accepted.traces", 30, tags=[]),
+                mock.call("datadog.tracer.buffer.accepted.traces", 30, tags=[]),
+                mock.call("datadog.tracer.buffer.accepted.spans", 15000, tags=[]),
+                mock.call("datadog.tracer.http.requests", 1, tags=[]),
+                mock.call("datadog.tracer.http.sent.bytes", AnyInt(), tags=[]),
+            ],
+            any_order=True,
+        )
+
+
+@allencodings
+def test_metrics_partial_flush_disabled(encoding, monkeypatch):
+    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
+
+    with override_global_config(dict(health_metrics_enabled=True)):
+        t = Tracer()
+        t.configure(
+            partial_flush_enabled=False,
+        )
         statsd_mock = mock.Mock()
         t._writer.dogstatsd = statsd_mock
         assert t._writer._report_metrics
@@ -248,8 +290,37 @@ def test_metrics(encoding, monkeypatch):
 @allencodings
 def test_single_trace_too_large(encoding, monkeypatch):
     monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
+    # setting writer interval to 5 seconds so that buffer can fit larger traces
+    monkeypatch.setenv("DD_TRACE_WRITER_INTERVAL_SECONDS", "5.0")

     t = Tracer()
+    assert t._partial_flush_enabled is True
+    with mock.patch("ddtrace.internal.writer.log") as log:
+        key = "a" * 250
+        with t.trace("huge"):
+            for i in range(200000):
+                with t.trace("operation") as s:
+                    # Need to make the strings unique so that the v0.5 encoding doesn’t compress the data
+                    s.set_tag(key + str(i), key + str(i))
+        t.shutdown()
+        log.warning.assert_any_call(
+            "trace buffer (%s traces %db/%db) cannot fit trace of size %db, dropping",
+            AnyInt(),
+            AnyInt(),
+            AnyInt(),
+            AnyInt(),
+        )
+        log.error.assert_not_called()
+
+
+@allencodings
+def test_single_trace_too_large_partial_flush_disabled(encoding, monkeypatch):
+    monkeypatch.setenv("DD_TRACE_API_VERSION", encoding)
+
+    t = Tracer()
+    t.configure(
+        partial_flush_enabled=False,
+    )
     with mock.patch("ddtrace.internal.writer.log") as log:
         with t.trace("huge"):
             for i in range(200000):
@@ -717,7 +788,6 @@ def test_partial_flush_log(run_python_code_in_subprocess, encoding, monkeypatch)
     t = Tracer()

     t.configure(
-        partial_flush_enabled=True,
         partial_flush_min_spans=partial_flush_min_spans,
     )
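For reference, a hedged sketch combining the two knobs exercised by these tests: constructing a tracer and setting both the enable flag and the flush threshold through `Tracer.configure`, as `test_partial_flush_log` does. The specific value 10 is illustrative only:

```python
from ddtrace import Tracer

t = Tracer()
# Keep partial flushing on, but lower the per-trace flush threshold.
# configure() accepts both keywords, as the tests in this file show.
t.configure(
    partial_flush_enabled=True,
    partial_flush_min_spans=10,
)
```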
