Commit 914cbca

Merge pull request #846 from DataDog/0.23-dev
Release 0.23.0
2 parents 7b34bb8 + ddf7371

131 files changed: +3923 −1241 lines changed


.circleci/config.yml

Lines changed: 27 additions & 4 deletions
@@ -62,11 +62,14 @@ jobs:
       # Install required dependencies
       # DEV: `pyopenssl` needed until the following PR is released
       #      https://github.com/pypa/twine/pull/447
-      - run: pip install twine readme_renderer[md] pyopenssl
+      # DEV: `wheel` is needed to run `bdist_wheel`
+      - run: pip install twine readme_renderer[md] pyopenssl wheel
       # Ensure we didn't cache from previous runs
       - run: rm -rf dist/
-      # Ensure package will build
+      # Ensure source package will build
       - run: python setup.py sdist
+      # Ensure wheel will build
+      - run: python setup.py bdist_wheel
       # Ensure package long description is valid and will render
       # https://github.com/pypa/twine/tree/6c4d5ecf2596c72b89b969ccc37b82c160645df8#twine-check
       - run: twine check dist/*

@@ -86,6 +89,20 @@ jobs:
           - tracer.results
       - *save_cache_step

+  internal:
+    docker:
+      - *test_runner
+    resource_class: *resource_class
+    steps:
+      - checkout
+      - *restore_cache_step
+      - run: tox -e '{py27,py34,py35,py36}-internal' --result-json /tmp/internal.results
+      - persist_to_workspace:
+          root: /tmp
+          paths:
+            - internal.results
+      - *save_cache_step
+
   opentracer:
     docker:
       - *test_runner

@@ -312,11 +329,13 @@ jobs:
     steps:
       - checkout
       - *restore_cache_step
-      - run: tox -e 'celery_contrib-{py27,py34,py35,py36}-celery{31,40,41,42}-redis{210}' --result-json /tmp/celery.results
+      - run: tox -e 'celery_contrib-{py27,py34,py35,py36}-celery{31}-redis{210}' --result-json /tmp/celery.1.results
+      - run: tox -e 'celery_contrib-{py27,py34,py35,py36}-celery{40,41,42}-{redis210-kombu43,redis320-kombu44}' --result-json /tmp/celery.2.results
       - persist_to_workspace:
           root: /tmp
           paths:
-            - celery.results
+            - celery.1.results
+            - celery.2.results
       - *save_cache_step

   elasticsearch:

@@ -1042,6 +1061,9 @@ workflows:
       - integration:
           requires:
             - flake8
+      - internal:
+          requires:
+            - flake8
       - jinja2:
           requires:
             - flake8

@@ -1156,6 +1178,7 @@ workflows:
           - grpc
           - httplib
           - integration
+          - internal
           - jinja2
           - kombu
           - mako

Rakefile

Lines changed: 2 additions & 146 deletions
@@ -1,90 +1,3 @@
-desc "Starts all backing services and run all tests"
-task :test do
-  sh "docker-compose up -d | cat"
-  begin
-    sh "tox"
-  ensure
-    sh "docker-compose kill"
-  end
-  sh "python -m tests.benchmark"
-end
-
-desc 'CI dependent task; tests in parallel'
-task :test_parallel do
-
-  begin
-    test_cassandra = sh "git diff-tree --no-commit-id --name-only -r HEAD | grep ddtrace/contrib/cassandra"
-  rescue StandardError => e
-    test_cassandra = false
-  end
-
-  sh "docker-compose up -d | cat"
-
-  # If cassandra hasn't been changed ignore cassandra tests
-  if not test_cassandra
-    n_total_envs = `tox -l | grep -v cassandra | wc -l`.to_i
-    envs = 'tox -l | grep -v cassandra | tr \'\n\' \',\''
-  else
-    n_total_envs = `tox -l | wc -l`.to_i
-    envs = 'tox -l | tr \'\n\' \',\''
-  end
-
-  circle_node_tot = ENV['CIRCLE_NODE_TOTAL'].to_i
-  n_envs_chunk = n_total_envs / circle_node_tot
-  env_list_start = 1
-  env_list_end = n_envs_chunk
-  begin
-    for node_index in 0..circle_node_tot
-      if ENV['CIRCLE_NODE_INDEX'].to_i == node_index then
-        # Node 0 already does as second task wait test, the others will require it to ensure db connections
-        if node_index >= 1 then
-          sh "tox -e wait"
-        end
-        sh "#{envs} | cut -d, -f#{env_list_start}-#{env_list_end} | xargs tox -e"
-      end
-      env_list_start = env_list_end + 1
-      env_list_end = env_list_end + n_envs_chunk
-    end
-  ensure
-    sh "docker-compose kill"
-  end
-
-  sh "python -m tests.benchmark"
-end
-
-desc "Run tests with envs matching the given pattern."
-task :"test:envs", [:grep] do |t, args|
-  pattern = args[:grep]
-  if !pattern
-    puts 'specify a pattern like rake test:envs["py27.*mongo"]'
-  else
-    sh "tox -l | grep '#{pattern}' | xargs tox -e"
-  end
-end
-
-namespace :docker do
-  task :up do
-    sh "docker-compose up -d | cat"
-  end
-
-  task :down do
-    sh "docker-compose down"
-  end
-end
-
-
-desc "install the library in dev mode"
-task :dev do
-  sh "pip uninstall -y ddtrace"
-  sh "pip install -e ."
-end
-
-desc "remove artifacts"
-task :clean do
-  sh 'python setup.py clean'
-  sh 'rm -rf build *egg* *.whl dist'
-end
-
 desc "build the docs"
 task :docs do
   sh "pip install sphinx"

@@ -94,7 +7,6 @@ task :docs do
 end

 # Deploy tasks
-S3_BUCKET = 'pypi.datadoghq.com'
 S3_DIR = ENV['S3_DIR']

 desc "release the a new wheel"

@@ -105,7 +17,8 @@ task :'release:wheel' do
   # - aws s3 cp dist/*.whl s3://pypi.datadoghq.com/#{s3_dir}/
   fail "Missing environment variable S3_DIR" if !S3_DIR or S3_DIR.empty?

-  sh "mkwheelhouse s3://#{S3_BUCKET}/#{S3_DIR}/ ."
+  # Use custom mkwheelhouse script to build and upload an sdist to S3 bucket
+  sh "scripts/mkwheelhouse"
 end

 desc "release the docs website"

@@ -169,60 +82,3 @@ namespace :pypi do
     sh "twine upload #{build}"
   end
 end
-
-namespace :version do
-
-  def get_version()
-    return `python setup.py --version`.strip()
-  end
-
-  def set_version(old, new)
-    branch = `git name-rev --name-only HEAD`.strip()
-    if branch != "master"
-      puts "you should only tag the master branch"
-      return
-    end
-    msg = "bumping version #{old} => #{new}"
-    path = "ddtrace/__init__.py"
-    sh "sed -i 's/#{old}/#{new}/' #{path}"
-    sh "git commit -m '#{msg}' #{path}"
-    sh "git tag v#{new}"
-    puts "Verify everything looks good, then `git push && git push --tags`"
-  end
-
-  def inc_version_num(version, type)
-    split = version.split(".").map{|v| v.to_i}
-    if type == 'bugfix'
-      split[2] += 1
-    elsif type == 'minor'
-      split[1] += 1
-      split[2] = 0
-    elsif type == 'major'
-      split[0] += 1
-      split[1] = 0
-      split[2] = 0
-    end
-    return split.join(".")
-  end
-
-  def inc_version(type)
-    old = get_version()
-    new = inc_version_num(old, type)
-    set_version(old, new)
-  end
-
-  desc "Cut a new bugfix release"
-  task :bugfix do
-    inc_version("bugfix")
-  end
-
-  desc "Cut a new minor release"
-  task :minor do
-    inc_version("minor")
-  end
-
-  task :major do
-    inc_version("major")
-  end
-
-end
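The removed `version` namespace implemented a plain semantic-version bump (version tagging now lives elsewhere). For reference, a minimal Python sketch of the same `inc_version_num` logic; this is hypothetical and not part of the repository:

# Hypothetical Python equivalent of the removed Ruby inc_version_num helper.
def inc_version_num(version, release_type):
    major, minor, bugfix = (int(part) for part in version.split('.'))
    if release_type == 'bugfix':
        bugfix += 1
    elif release_type == 'minor':
        minor, bugfix = minor + 1, 0
    elif release_type == 'major':
        major, minor, bugfix = major + 1, 0, 0
    return '{}.{}.{}'.format(major, minor, bugfix)


assert inc_version_num('0.22.0', 'minor') == '0.23.0'  # e.g. this release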

ddtrace/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 from .tracer import Tracer
 from .settings import config

-__version__ = '0.22.0'
+__version__ = '0.23.0'

 # a global tracer instance with integration settings
 tracer = Tracer()

ddtrace/api.py

Lines changed: 9 additions & 4 deletions
@@ -1,16 +1,17 @@
 # stdlib
-import logging
 import time
 import ddtrace
 from json import loads

 # project
 from .encoding import get_encoder, JSONEncoder
 from .compat import httplib, PYTHON_VERSION, PYTHON_INTERPRETER, get_connection_response
+from .internal.logger import get_logger
+from .payload import Payload
 from .utils.deprecation import deprecated


-log = logging.getLogger(__name__)
+log = get_logger(__name__)

 TRACE_COUNT_HEADER = 'X-Datadog-Trace-Count'


@@ -147,9 +148,13 @@ def _downgrade(self):
     def send_traces(self, traces):
         if not traces:
             return
+
         start = time.time()
-        data = self._encoder.encode_traces(traces)
-        response = self._put(self._traces, data, len(traces))
+        payload = Payload(encoder=self._encoder)
+        for trace in traces:
+            payload.add_trace(trace)
+
+        response = self._put(self._traces, payload.get_payload(), payload.length)

         # the API endpoint is not available so we should downgrade the connection and re-try the call
         if response.status in [404, 415] and self._fallback:
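`send_traces` now batches traces through a new `Payload` helper from `ddtrace/payload.py`, which is not shown in this commit excerpt. Below is a minimal sketch of the interface the hunk relies on; only `add_trace`, `get_payload`, and `length` appear in the diff, so the encoder methods used here are assumptions:

# Hypothetical sketch of the Payload interface used above; the real
# ddtrace/payload.py and its encoder helpers are not shown in this diff.
class Payload(object):
    def __init__(self, encoder):
        self.encoder = encoder
        self._encoded = []

    def add_trace(self, trace):
        # Encode each trace as it is added to the batch
        self._encoded.append(self.encoder.encode_trace(trace))

    @property
    def length(self):
        # Trace count, forwarded as the X-Datadog-Trace-Count header
        return len(self._encoded)

    def get_payload(self):
        # Join the encoded traces into a single request body
        return self.encoder.join_encoded(self._encoded)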

ddtrace/bootstrap/sitecustomize.py

Lines changed: 2 additions & 1 deletion
@@ -9,6 +9,7 @@
 import logging

 from ddtrace.utils.formats import asbool, get_env
+from ddtrace.internal.logger import get_logger

 logs_injection = asbool(get_env('logs', 'injection'))
 DD_LOG_FORMAT = '%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] {}- %(message)s'.format(

@@ -32,7 +33,7 @@
 else:
     logging.basicConfig(format=DD_LOG_FORMAT)

-log = logging.getLogger(__name__)
+log = get_logger(__name__)

 EXTRA_PATCHED_MODULES = {
     "bottle": True,

ddtrace/commands/ddtrace_run.py

Lines changed: 5 additions & 0 deletions
@@ -10,6 +10,11 @@
 if debug and debug.lower() == "true":
     logging.basicConfig(level=logging.DEBUG)

+# Do not use `ddtrace.internal.logger.get_logger` here
+# DEV: It isn't really necessary to use `DDLogger` here so we want to
+#      defer importing `ddtrace` until we actually need it.
+# As well, no actual rate limiting would apply here since we only
+#      have a few logged lines
 log = logging.getLogger(__name__)

 USAGE = """
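The new comment refers to the rate limiting that `DDLogger` (returned by `ddtrace.internal.logger.get_logger`) applies. A rough sketch of that idea, assuming one record per logger per fixed time bucket; the actual `ddtrace.internal.logger` module is not part of this excerpt and may differ:

# Hypothetical sketch of the rate-limiting idea behind DDLogger.
import logging


class RateLimitedLogger(logging.Logger):
    """Emit at most one record per time bucket, dropping the rest."""

    _bucket_seconds = 60

    def __init__(self, name):
        super(RateLimitedLogger, self).__init__(name)
        self._last_bucket = None

    def handle(self, record):
        # record.created is the epoch time the record was made
        bucket = int(record.created // self._bucket_seconds)
        if bucket != self._last_bucket:
            self._last_bucket = bucket
            super(RateLimitedLogger, self).handle(record)


def get_logger(name):
    # Create loggers through the rate-limited class; note that a logger
    # already created with the default class is returned unchanged.
    original_class = logging.getLoggerClass()
    logging.setLoggerClass(RateLimitedLogger)
    try:
        return logging.getLogger(name)
    finally:
        logging.setLoggerClass(original_class)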

ddtrace/constants.py

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 FILTERS_KEY = 'FILTERS'
 SAMPLE_RATE_METRIC_KEY = '_sample_rate'
 SAMPLING_PRIORITY_KEY = '_sampling_priority_v1'
+ANALYTICS_SAMPLE_RATE_KEY = '_dd1.sr.eausr'
 ORIGIN_KEY = '_dd.origin'
-EVENT_SAMPLE_RATE_KEY = '_dd1.sr.eausr'

-NUMERIC_TAGS = (EVENT_SAMPLE_RATE_KEY, )
+NUMERIC_TAGS = (ANALYTICS_SAMPLE_RATE_KEY, )

ddtrace/context.py

Lines changed: 2 additions & 2 deletions
@@ -1,10 +1,10 @@
-import logging
 import threading

 from .constants import SAMPLING_PRIORITY_KEY, ORIGIN_KEY
+from .internal.logger import get_logger
 from .utils.formats import asbool, get_env

-log = logging.getLogger(__name__)
+log = get_logger(__name__)


 class Context(object):

ddtrace/contrib/aiobotocore/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 """
 The aiobotocore integration will trace all AWS calls made with the ``aiobotocore``
 library. This integration isn't enabled when applying the default patching.
-To enable it, you must run ``patch_all(botocore=True)``
+To enable it, you must run ``patch_all(aiobotocore=True)``

 ::


ddtrace/contrib/aiobotocore/patch.py

Lines changed: 8 additions & 0 deletions
@@ -1,9 +1,11 @@
 import asyncio
 from ddtrace.vendor import wrapt
+from ddtrace import config
 import aiobotocore.client

 from aiobotocore.endpoint import ClientResponseContentProxy

+from ...constants import ANALYTICS_SAMPLE_RATE_KEY
 from ...pin import Pin
 from ...ext import http, aws
 from ...compat import PYTHON_VERSION_INFO

@@ -117,4 +119,10 @@ def _wrapped_api_call(original_func, instance, args, kwargs):
     if request_id2:
         span.set_tag('aws.requestid2', request_id2)

+    # set analytics sample rate
+    span.set_tag(
+        ANALYTICS_SAMPLE_RATE_KEY,
+        config.aiobotocore.get_analytics_sample_rate()
+    )
+
     return result
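The new tag reads the analytics sample rate from the integration's config. A hedged usage sketch follows; only `config.aiobotocore.get_analytics_sample_rate()` is confirmed by the diff, so the `analytics_enabled`/`analytics_sample_rate` setting names here are assumptions:

# Hypothetical usage sketch; the setting names below are assumptions, only
# config.aiobotocore.get_analytics_sample_rate() appears in the diff above.
from ddtrace import config, patch_all

patch_all(aiobotocore=True)  # the integration is opt-in, per the docstring fix above

# Opt aiobotocore spans into trace analytics at a 50% sample rate
config.aiobotocore['analytics_enabled'] = True
config.aiobotocore['analytics_sample_rate'] = 0.5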
