Skip to content

Commit ddf7371

Browse files
authored
[dev/tooling] Use custom mkwheelhouse script to build/upload sdist packages (#847)
* Use custom mkwheelhouse script * Also test building a wheel * ensure 'wheel' is installed * add missing arg * key.name instead of key
1 parent 4aad7af commit ddf7371

File tree

3 files changed

+64
-148
lines changed

3 files changed

+64
-148
lines changed

.circleci/config.yml

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -62,11 +62,14 @@ jobs:
6262
# Install required dependencies
6363
# DEV: `pyopenssl` needed until the following PR is released
6464
# https://github.com/pypa/twine/pull/447
65-
- run: pip install twine readme_renderer[md] pyopenssl
65+
# DEV: `wheel` is needed to run `bdist_wheel`
66+
- run: pip install twine readme_renderer[md] pyopenssl wheel
6667
# Ensure we didn't cache from previous runs
6768
- run: rm -rf dist/
68-
# Ensure package will build
69+
# Ensure source package will build
6970
- run: python setup.py sdist
71+
# Ensure wheel will build
72+
- run: python setup.py bdist_wheel
7073
# Ensure package long description is valid and will render
7174
# https://github.com/pypa/twine/tree/6c4d5ecf2596c72b89b969ccc37b82c160645df8#twine-check
7275
- run: twine check dist/*

Rakefile

Lines changed: 2 additions & 146 deletions
Original file line numberDiff line numberDiff line change
@@ -1,90 +1,3 @@
1-
desc "Starts all backing services and run all tests"
2-
task :test do
3-
sh "docker-compose up -d | cat"
4-
begin
5-
sh "tox"
6-
ensure
7-
sh "docker-compose kill"
8-
end
9-
sh "python -m tests.benchmark"
10-
end
11-
12-
desc 'CI dependent task; tests in parallel'
13-
task:test_parallel do
14-
15-
begin
16-
test_cassandra = sh "git diff-tree --no-commit-id --name-only -r HEAD | grep ddtrace/contrib/cassandra"
17-
rescue StandardError => e
18-
test_cassandra = false
19-
end
20-
21-
sh "docker-compose up -d | cat"
22-
23-
# If cassandra hasn't been changed ignore cassandra tests
24-
if not test_cassandra
25-
n_total_envs = `tox -l | grep -v cassandra | wc -l`.to_i
26-
envs = 'tox -l | grep -v cassandra | tr \'\n\' \',\''
27-
else
28-
n_total_envs = `tox -l | wc -l`.to_i
29-
envs = 'tox -l | tr \'\n\' \',\''
30-
end
31-
32-
circle_node_tot = ENV['CIRCLE_NODE_TOTAL'].to_i
33-
n_envs_chunk = n_total_envs / circle_node_tot
34-
env_list_start = 1
35-
env_list_end = n_envs_chunk
36-
begin
37-
for node_index in 0..circle_node_tot
38-
if ENV['CIRCLE_NODE_INDEX'].to_i == node_index then
39-
# Node 0 already does as second task wait test, the others will require it to ensure db connections
40-
if node_index >= 1 then
41-
sh "tox -e wait"
42-
end
43-
sh "#{envs} | cut -d, -f#{env_list_start}-#{env_list_end} | xargs tox -e"
44-
end
45-
env_list_start = env_list_end + 1
46-
env_list_end = env_list_end + n_envs_chunk
47-
end
48-
ensure
49-
sh "docker-compose kill"
50-
end
51-
52-
sh "python -m tests.benchmark"
53-
end
54-
55-
desc "Run tests with envs matching the given pattern."
56-
task :"test:envs", [:grep] do |t, args|
57-
pattern = args[:grep]
58-
if !pattern
59-
puts 'specify a pattern like rake test:envs["py27.*mongo"]'
60-
else
61-
sh "tox -l | grep '#{pattern}' | xargs tox -e"
62-
end
63-
end
64-
65-
namespace :docker do
66-
task :up do
67-
sh "docker-compose up -d | cat"
68-
end
69-
70-
task :down do
71-
sh "docker-compose down"
72-
end
73-
end
74-
75-
76-
desc "install the library in dev mode"
77-
task :dev do
78-
sh "pip uninstall -y ddtrace"
79-
sh "pip install -e ."
80-
end
81-
82-
desc "remove artifacts"
83-
task :clean do
84-
sh 'python setup.py clean'
85-
sh 'rm -rf build *egg* *.whl dist'
86-
end
87-
881
desc "build the docs"
892
task :docs do
903
sh "pip install sphinx"
@@ -94,7 +7,6 @@ task :docs do
947
end
958

969
# Deploy tasks
97-
S3_BUCKET = 'pypi.datadoghq.com'
9810
S3_DIR = ENV['S3_DIR']
9911

10012
desc "release the a new wheel"
@@ -105,7 +17,8 @@ task :'release:wheel' do
10517
# - aws s3 cp dist/*.whl s3://pypi.datadoghq.com/#{s3_dir}/
10618
fail "Missing environment variable S3_DIR" if !S3_DIR or S3_DIR.empty?
10719

108-
sh "mkwheelhouse s3://#{S3_BUCKET}/#{S3_DIR}/ ."
20+
# Use custom mkwheelhouse script to build and upload an sdist to S3 bucket
21+
sh "scripts/mkwheelhouse"
10922
end
11023

11124
desc "release the docs website"
@@ -169,60 +82,3 @@ namespace :pypi do
16982
sh "twine upload #{build}"
17083
end
17184
end
172-
173-
namespace :version do
174-
175-
def get_version()
176-
return `python setup.py --version`.strip()
177-
end
178-
179-
def set_version(old, new)
180-
branch = `git name-rev --name-only HEAD`.strip()
181-
if branch != "master"
182-
puts "you should only tag the master branch"
183-
return
184-
end
185-
msg = "bumping version #{old} => #{new}"
186-
path = "ddtrace/__init__.py"
187-
sh "sed -i 's/#{old}/#{new}/' #{path}"
188-
sh "git commit -m '#{msg}' #{path}"
189-
sh "git tag v#{new}"
190-
puts "Verify everything looks good, then `git push && git push --tags`"
191-
end
192-
193-
def inc_version_num(version, type)
194-
split = version.split(".").map{|v| v.to_i}
195-
if type == 'bugfix'
196-
split[2] += 1
197-
elsif type == 'minor'
198-
split[1] += 1
199-
split[2] = 0
200-
elsif type == 'major'
201-
split[0] += 1
202-
split[1] = 0
203-
split[2] = 0
204-
end
205-
return split.join(".")
206-
end
207-
208-
def inc_version(type)
209-
old = get_version()
210-
new = inc_version_num(old, type)
211-
set_version(old, new)
212-
end
213-
214-
desc "Cut a new bugfix release"
215-
task :bugfix do
216-
inc_version("bugfix")
217-
end
218-
219-
desc "Cut a new minor release"
220-
task :minor do
221-
inc_version("minor")
222-
end
223-
224-
task :major do
225-
inc_version("major")
226-
end
227-
228-
end

scripts/mkwheelhouse

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
#!/usr/bin/env python
2+
import os
3+
import shutil
4+
import tempfile
5+
6+
import mkwheelhouse
7+
8+
S3_BUCKET = 'pypi.datadoghq.com'
9+
S3_DIR = os.environ['S3_DIR']
10+
11+
12+
# DEV: This is the same `mkwheelhouse.build_wheels` except we are running `python setup.py sdist` instead
def build_sdist():
    """Build a source distribution into a fresh temporary directory.

    Returns the path of the temp directory containing the sdist.
    The caller is responsible for removing the directory.
    """
    dist_dir = tempfile.mkdtemp(prefix='mkwheelhouse-')
    # Delegate subprocess handling to mkwheelhouse so output/error
    # behavior matches the rest of the tool.
    mkwheelhouse.spawn(['python', 'setup.py', 'sdist', '--dist-dir', dist_dir])
    return dist_dir
21+
22+
23+
# DEV: This is the same as `mkwheelhouse.Bucket.make_index`, except we include `*.whl` and `*.tar.gz` files
def make_index(bucket):
    """Render a minimal HTML index page linking the bucket's packages.

    Only wheel (``.whl``) and source-dist (``.tar.gz``) keys are linked;
    any other objects in the bucket (e.g. ``index.html`` itself) are
    skipped. Returns the HTML document as a string.
    """
    doc, tag, text = mkwheelhouse.yattag.Doc().tagtext()
    with tag('html'):
        for key in bucket.list():
            name = key.name
            # Link only package artifacts (wheels and sdists).
            if name.endswith('.whl') or name.endswith('.tar.gz'):
                with tag('a', href=bucket.generate_url(key)):
                    text(name)
                doc.stag('br')
    return doc.getvalue()
37+
38+
39+
# DEV: This is the same as `mkwheelhouse.run` except we hard code some values and use our custom functions instead
def run():
    """Build an sdist and publish it to the S3-backed package index.

    Ensures an ``index.html`` exists in the bucket, uploads a freshly
    built sdist, then regenerates the index so it includes the new
    artifact. The temporary build directory is always removed, even if
    the upload fails.
    """
    s3_url = 's3://{0}/{1}'.format(S3_BUCKET, S3_DIR)
    acl = 'private'
    bucket = mkwheelhouse.Bucket(s3_url)

    # Seed an empty index so `generate_url('index.html')` points at a
    # real object even on a brand-new bucket/prefix.
    if not bucket.has_key('index.html'):  # noqa
        bucket.put('<!DOCTYPE html><html></html>', 'index.html', acl=acl)

    index_url = bucket.generate_url('index.html')
    build_dir = build_sdist()
    try:
        bucket.sync(build_dir, acl=acl)
        bucket.put(make_index(bucket), key='index.html', acl=acl)
    finally:
        # FIX: previously the temp dir leaked when sync/put raised;
        # always clean it up so repeated runs don't accumulate dirs.
        shutil.rmtree(build_dir)
    print('mkwheelhouse: index written to', index_url)
54+
55+
56+
if __name__ == '__main__':
57+
run()

0 commit comments

Comments (0)