forked from jantman/biweeklybudget
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtestresults2s3.py
More file actions
executable file
·137 lines (120 loc) · 4.7 KB
/
testresults2s3.py
File metadata and controls
executable file
·137 lines (120 loc) · 4.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
#!/usr/bin/env python
"""
Upload test artifacts to S3, from TravisCI, along with a generated index page.
"""
import os
import boto3
import logging
from mimetypes import guess_type
# Timestamped, level-prefixed log lines for easy correlation with CI output.
FORMAT = "[%(asctime)s %(levelname)s] %(message)s"
logging.basicConfig(level=logging.INFO, format=FORMAT)
logger = logging.getLogger()

# The AWS SDK loggers are very chatty at INFO/DEBUG; raise their threshold
# so only warnings and above get through. (Previously this stanza was
# copy-pasted three times; one loop keeps the three loggers in sync.)
for _aws_logger_name in ('boto3', 'botocore', 's3transfer'):
    _aws_log = logging.getLogger(_aws_logger_name)
    _aws_log.setLevel(logging.WARNING)
    _aws_log.propagate = True
class S3Uploader(object):
    """
    Upload local test artifacts (files and directories) to S3 under a
    per-CI-job prefix, along with a generated ``index.html`` linking to
    all of the uploaded files.
    """

    def __init__(self):
        """
        Connect to S3 and compute the local base directory and the S3 key
        prefix for this TravisCI job.

        :raises RuntimeError: if ``TRAVIS_REPO_SLUG`` or
          ``TRAVIS_JOB_NUMBER`` is not set in the environment
        :raises KeyError: if ``TRAVIS_ACCESS_KEY`` or ``TRAVIS_SECRET_KEY``
          is not set in the environment
        """
        self._s3 = boto3.resource(
            's3', region_name='us-east-1',
            aws_access_key_id=os.environ['TRAVIS_ACCESS_KEY'],
            aws_secret_access_key=os.environ['TRAVIS_SECRET_KEY']
        )
        if 'TRAVIS_REPO_SLUG' not in os.environ:
            raise RuntimeError('TRAVIS_REPO_SLUG not in environment')
        if 'TRAVIS_JOB_NUMBER' not in os.environ:
            raise RuntimeError('TRAVIS_JOB_NUMBER not in environment')
        self.bkt = self._s3.Bucket('jantman-personal-public')
        # Directory containing this script; artifact paths live under it,
        # and it maps 1:1 onto self.prefix in S3 (see key_for_path).
        self.basedir = os.path.dirname(os.path.realpath(__file__)) + '/'
        self.prefix = 'travisci/%s/%s/' % (
            os.environ['TRAVIS_REPO_SLUG'],
            os.environ['TRAVIS_JOB_NUMBER']
        )

    def run(self):
        """
        Find all artifact files, generate the index page, upload everything
        to S3, and print the public results URL.
        """
        files = self._list_all_files(['htmlcov', 'results', 'coverage.xml'])
        # Generate the index before uploading so it goes up in the same pass.
        files.append(self.write_index_html(files))
        for f in sorted(files):
            self.upload_file(self.key_for_path(f), f)
        print("Uploaded %d files" % len(files))
        print("\nResults available at:\n")
        print('http://jantman-personal-public.s3-website-us-east-1.'
              'amazonaws.com/%sindex.html' % self.prefix)

    def write_index_html(self, files):
        """
        Write an ``index.html`` in ``self.basedir`` linking to each of the
        given files, with paths shown relative to ``self.basedir``.

        :param files: absolute local paths of the files to link to
        :type files: list
        :return: absolute path to the written index.html
        :rtype: str
        """
        p = os.path.join(self.basedir, 'index.html')
        title = 'TravisCI %s Job %s' % (
            os.environ['TRAVIS_REPO_SLUG'],
            os.environ['TRAVIS_JOB_NUMBER']
        )
        s = '<html><head><title>%s</title></head>' % title
        s += '<body><h1>%s</h1><ul>' % title
        for f in sorted(files):
            f = f.replace(self.basedir, '')
            # fix: close each <li> element (previously left unclosed)
            s += '<li><a href="%s">%s</a></li>' % (f, f)
        s += '</ul></body></html>'
        with open(p, 'w') as fh:
            fh.write(s)
        return p

    def key_for_path(self, f):
        """
        Return the key in S3 for a file at f, by mapping the leading
        ``self.basedir`` component onto ``self.prefix``.

        :param f: absolute local file path
        :type f: str
        :return: the S3 key for the file
        :rtype: str
        """
        # str.replace() would also substitute an occurrence of basedir in
        # the middle of the path; only translate the leading component.
        if f.startswith(self.basedir):
            return self.prefix + f[len(self.basedir):]
        return f.replace(self.basedir, self.prefix)

    def _list_all_files(self, paths):
        """
        Given a list of paths on the local filesystem, return a list of all
        files in ``paths`` that exist, and for any directories in ``paths``
        that exist, all files recursively contained in them.

        :param paths: list of file/directory paths to check, relative to
          ``self.basedir``
        :type paths: list
        :return: list of all extant files contained under those paths
          (deduplicated; order not guaranteed)
        :rtype: list
        """
        files = []
        logger.info('Listing files under %d paths', len(paths))
        for p in paths:
            p = os.path.abspath(os.path.join(self.basedir, p))
            if not os.path.exists(p):
                logger.warning('Skipping non-existent path: %s', p)
                continue
            if os.path.isfile(p):
                files.append(p)
            elif os.path.isdir(p):
                dirs = self._listdir(p)
                logger.debug('Found %d files under %s', len(dirs), p)
                files.extend(dirs)
            else:
                logger.warning('Skipping unknown path type: %s', p)
        logger.debug('Done finding candidate files.')
        # dedupe; callers sort before use
        return list(set(files))

    def _listdir(self, path):
        """
        Given the path to a directory, return a list of all file paths under
        that directory (recursively).

        :param path: path to directory
        :type path: str
        :return: list of regular file paths under that directory
        :rtype: list
        """
        files = []
        for root, _, filenames in os.walk(path):
            for fn in filenames:
                p = os.path.join(root, fn)
                if os.path.isfile(p):
                    files.append(p)
        return files

    def upload_file(self, key, path):
        """
        Upload the file at ``path`` to S3 at ``key``, with a guessed
        Content-Type.

        :param key: the S3 key to upload to
        :type key: str
        :param path: local path of the file to upload
        :type path: str
        """
        mt = self._content_type(path)
        logger.info('Uploading %s to %s (Content-Type: %s)', path, key, mt)
        self.bkt.upload_file(
            path, key, ExtraArgs={'ContentType': mt}
        )

    @staticmethod
    def _content_type(path):
        """
        Guess the MIME type for ``path``; fall back to
        ``application/octet-stream`` when it cannot be guessed, since boto3
        raises a parameter validation error for a ContentType of None.

        :param path: file path to guess the type of
        :type path: str
        :return: MIME type string
        :rtype: str
        """
        return guess_type(path)[0] or 'application/octet-stream'
if __name__ == '__main__':
    # Script entry point: build the uploader and push all artifacts.
    uploader = S3Uploader()
    uploader.run()