Skip to content

Commit 3d05b32

Browse files
authored
Merge pull request #1126 from minrk/test-tmp
Add check-tmp step to local repo tests
2 parents f6ecb28 + e7c93b0 commit 3d05b32

File tree

5 files changed

+147
-10
lines changed

5 files changed

+147
-10
lines changed

repo2docker/buildpacks/_r_base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ def rstudio_base_scripts(r_version):
3333
echo '{shiny_sha256sum} /tmp/shiny.deb' | sha256sum -c - && \
3434
apt-get update > /dev/null && \
3535
apt install -y --no-install-recommends /tmp/rstudio.deb /tmp/shiny.deb && \
36-
rm /tmp/rstudio.deb && \
36+
rm /tmp/*.deb && \
3737
apt-get -qq purge && \
3838
apt-get -qq clean && \
3939
rm -rf /var/lib/apt/lists/*

repo2docker/buildpacks/pipfile/__init__.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111

1212
import toml
1313

14+
from ...semver import parse_version as V
1415
from ..conda import CondaBuildPack
1516

1617
VERSION_PAT = re.compile(r"\d+(\.\d+)*")
@@ -87,8 +88,16 @@ def get_preassemble_scripts(self):
8788
"""scripts to run prior to staging the repo contents"""
8889
scripts = super().get_preassemble_scripts()
8990
# install pipenv to install dependencies within Pipfile.lock or Pipfile
91+
if V(self.python_version) < V("3.6"):
92+
# last pipenv version to support 2.7, 3.5
93+
pipenv_version = "2021.5.29"
94+
else:
95+
pipenv_version = "2022.1.8"
9096
scripts.append(
91-
("${NB_USER}", "${KERNEL_PYTHON_PREFIX}/bin/pip install pipenv==2018.11.26")
97+
(
98+
"${NB_USER}",
99+
f"${{KERNEL_PYTHON_PREFIX}}/bin/pip install --no-cache-dir pipenv=={pipenv_version}",
100+
)
92101
)
93102
return scripts
94103

tests/check-tmp

Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
#!/usr/bin/env python3
2+
"""
3+
Script to check for leftover files
4+
5+
Checks a collection of temporary or cache directories,
6+
to ensure we aren't wasting image size by forgetting cleanup steps.
7+
8+
This script is run in every local repo image we test
9+
"""
10+
11+
import os
12+
import sys
13+
from subprocess import check_output
14+
from textwrap import indent
15+
16+
# directories larger than this are considered a failure
# a few little files here aren't a problem
THRESHOLD = 1  # in MB

# bytes per megabyte, used when reporting individual file sizes
MB = 1024 * 1024

# the paths to check
# all of these locations
# should be cleaned up
# missing is okay
PATHS = [
    "/tmp/",
    # check whole home?
    # this shouldn't be empty, but for our tests (so far) it should be very small
    # This is the easiest way to ensure we aren't leaving any unexpected files
    # without knowing ahead of time where all possible caches might be (.npm, .cache, etc.)
    "~/",
    "/root/",
]
35+
36+
37+
def du(path):
    """Return disk usage in megabytes of a path"""
    # du -H: follow command-line symlinks
    # du -k: report in kilobytes; -s: one summary line for the whole tree
    report = check_output(["du", "-Hks", path])
    kilobytes = int(report.split(None, 1)[0])
    return kilobytes / 1024
42+
43+
44+
def check_dir_size(path):
    """Measure a directory and compare it against THRESHOLD

    Returns:

    True: directory size is below THRESHOLD or is missing
    False: directory is larger than THRESHOLD
    """
    path = os.path.expanduser(path)

    # a path that was never created counts as clean
    if not os.path.exists(path):
        print("{path}: missing OK".format(**locals()))
        return True

    size_mb = du(path)
    print("{path}: {size_mb:.1f} MB".format(**locals()), end=" ")
    if size_mb <= THRESHOLD:
        print("OK")
        return True

    print("FAIL")
    # du only reports directories, so also list top-level files
    # that are big enough (>0.1 MB) to matter
    for name in os.listdir(path):
        subpath = os.path.join(path, name)
        if not os.path.isfile(subpath):
            continue
        file_sz = os.path.getsize(subpath) / MB
        if file_sz > 0.1:
            print(" {file_sz:.1f}M {subpath}".format(**locals()))
    # get report on all subdirs that are at least 100k
    report = check_output(["du", "-Hh", "-t", "100000", path]).decode("utf8")
    print(indent(report, " "))
    return False
79+
80+
81+
def main():
    """Check every configured path; exit non-zero if any is too large"""
    # check all paths before exiting, so the report covers every failure
    failed = False
    for path in PATHS:
        if not check_dir_size(path):
            failed = True
    if failed:
        sys.exit(1)


if __name__ == "__main__":
    main()

tests/conftest.py

Lines changed: 46 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -22,14 +22,17 @@
2222
import requests
2323
import subprocess
2424
import time
25-
2625
from tempfile import TemporaryDirectory
2726

27+
28+
import escapism
2829
import pytest
2930
import yaml
3031

3132
from repo2docker.__main__ import make_r2d
3233

34+
TESTS_DIR = os.path.abspath(os.path.dirname(__file__))
35+
3336

3437
def pytest_collect_file(parent, path):
3538
if path.basename == "verify":
@@ -38,12 +41,20 @@ def pytest_collect_file(parent, path):
3841
return RemoteRepoList.from_parent(parent, fspath=path)
3942

4043

41-
def make_test_func(args):
44+
def make_test_func(args, skip_build=False, extra_run_kwargs=None):
4245
"""Generate a test function that runs repo2docker"""
4346

4447
def test():
4548
app = make_r2d(args)
4649
app.initialize()
50+
if extra_run_kwargs:
51+
app.extra_run_kwargs.update(extra_run_kwargs)
52+
if skip_build:
53+
54+
def build_noop():
55+
print("Skipping build")
56+
57+
app.skip_build = build_noop
4758
if app.run_cmd:
4859
# verify test, run it
4960
app.start()
@@ -184,14 +195,18 @@ def repo_with_submodule():
184195
class Repo2DockerTest(pytest.Function):
185196
"""A pytest.Item for running repo2docker"""
186197

187-
def __init__(self, name, parent, args=None):
198+
def __init__(
199+
self, name, parent, args=None, skip_build=False, extra_run_kwargs=None
200+
):
188201
self.args = args
189202
self.save_cwd = os.getcwd()
190-
f = parent.obj = make_test_func(args)
203+
f = parent.obj = make_test_func(
204+
args, skip_build=skip_build, extra_run_kwargs=extra_run_kwargs
205+
)
191206
super().__init__(name, parent, callobj=f)
192207

193208
def reportinfo(self):
194-
return self.parent.fspath, None, ""
209+
return (self.parent.fspath, None, "")
195210

196211
def repr_failure(self, excinfo):
197212
err = excinfo.value
@@ -217,11 +232,35 @@ def collect(self):
217232
extra_args = yaml.safe_load(f)
218233
args += extra_args
219234

235+
print(self.fspath.basename, self.fspath.dirname, str(self.fspath))
236+
# re-use image name for multiple tests of the same image
237+
# so we don't run through the build twice
238+
rel_repo_dir = os.path.relpath(self.fspath.dirname, TESTS_DIR)
239+
image_name = f"r2d-tests-{escapism.escape(rel_repo_dir, escape_char='-').lower()}-{int(time.time())}"
240+
args.append(f"--image-name={image_name}")
220241
args.append(self.fspath.dirname)
221-
222242
yield Repo2DockerTest.from_parent(self, name="build", args=args)
243+
244+
yield Repo2DockerTest.from_parent(
245+
self,
246+
name=self.fspath.basename,
247+
args=args + ["./verify"],
248+
skip_build=True,
249+
)
250+
251+
# mount the tests dir as a volume
252+
check_tmp_args = (
253+
args[:-1]
254+
+ ["--volume", f"{TESTS_DIR}:/io/tests"]
255+
+ [args[-1], "/io/tests/check-tmp"]
256+
)
257+
223258
yield Repo2DockerTest.from_parent(
224-
self, name=self.fspath.basename, args=args + ["./verify"]
259+
self,
260+
name="check-tmp",
261+
args=check_tmp_args,
262+
skip_build=True,
263+
extra_run_kwargs={"user": "root"},
225264
)
226265

227266

tests/venv/postBuild/postBuild

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
#!/bin/bash
22
jupyter nbextension enable --py --sys-prefix ipyleaflet
3-
npm install --global configurable-http-proxy
3+
npm install --global configurable-http-proxy
4+
npm cache clean --force

0 commit comments

Comments
 (0)