
Commit 2f8c571

Fix test dirs
1 parent 8414155 commit 2f8c571

4 files changed, +33 -31 lines changed

scripts/builder/build.mk

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ clean--build:
 build: $(BUILD_TIMESTAMP) ## Complete project install and build artifacts for deployment
 
 $(BUILD_TIMESTAMP): $(BUILD_DEPENDENCIES)
-	@find $(CURDIR) -name make.py | xargs -n 1 -P 8 -I % bash -c 'poetry run python %'
+	@find $(CURDIR)/src -name make.py | xargs -n 1 -P 8 -I % bash -c 'poetry run python %'
 	touch $(BUILD_TIMESTAMP)
 
 generate--sbom: build
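
The build target now only discovers make.py scripts under $(CURDIR)/src, so anything under archived_epr (or any other top-level directory) is no longer built. A rough Python sketch of the narrowed discovery, illustrative only; the real target still shells out to find | xargs and runs each script via poetry run python:

    from pathlib import Path

    repo_root = Path.cwd()  # stands in for $(CURDIR)

    # Before: every make.py anywhere in the repo, including archived_epr/
    all_scripts = sorted(repo_root.rglob("make.py"))

    # After: only make.py files under src/
    src_scripts = sorted((repo_root / "src").rglob("make.py"))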

scripts/builder/layer_build.py

Lines changed: 7 additions & 6 deletions
@@ -47,13 +47,14 @@ def create_zip_package(
 
 
 def build(file):
-    layer_base_dir = get_base_dir(file)
-    package_name = layer_base_dir.name
+    if "archived_epr" not in file:
+        layer_base_dir = get_base_dir(file)
+        package_name = layer_base_dir.name
 
-    with create_zip_package(
-        package_name=package_name, base_dir=layer_base_dir
-    ) as build_dir:
-        copy_source_code(source_dir=layer_base_dir, build_dir=build_dir)
+        with create_zip_package(
+            package_name=package_name, base_dir=layer_base_dir
+        ) as build_dir:
+            copy_source_code(source_dir=layer_base_dir, build_dir=build_dir)
 
 
 @contextmanager
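
The new guard in build() is a plain substring check on the incoming file path, so any make.py whose path contains archived_epr is skipped before any zip packaging starts. A minimal standalone illustration of the check (the paths below are made up):

    # Hypothetical paths, for illustration only.
    candidate_files = [
        "src/api/some_lambda/make.py",         # would still be built
        "archived_epr/src_old/layer/make.py",  # now skipped entirely
    ]

    for file in candidate_files:
        if "archived_epr" not in file:
            print(f"building {file}")
        else:
            print(f"skipping {file}")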

scripts/builder/third_party_build.py

Lines changed: 18 additions & 17 deletions
@@ -98,21 +98,22 @@ def build_third_party(
     group: str,
     dependencies: dict[str, str],
 ):
-    base_dir = get_base_dir(file)
-    package_zipper = create_zip_package(
-        package_name=f"third_party_{group}", base_dir=base_dir, third_party=True
-    )
-    docker_file = get_dockerfile_path(base_dir=base_dir, group=group)
+    if "archived_epr" not in file:
+        base_dir = get_base_dir(file)
+        package_zipper = create_zip_package(
+            package_name=f"third_party_{group}", base_dir=base_dir, third_party=True
+        )
+        docker_file = get_dockerfile_path(base_dir=base_dir, group=group)
 
-    with TemporaryDirectory() as root_dir, package_zipper as build_dir:
-        root_dir = Path(root_dir).resolve()
-        venv_dir = root_dir / VENV
-        with create_temp_path(path=venv_dir, is_dir=True):
-            create_requirements(
-                root_dir=root_dir,
-                pyproject_toml_path=pyproject_toml_path,
-                group=group,
-                dependencies=dependencies,
-            )
-            docker_run(docker_file=docker_file, root_dir=root_dir, group=group)
-            copy_source_code(source_dir=venv_dir, build_dir=build_dir)
+        with TemporaryDirectory() as root_dir, package_zipper as build_dir:
+            root_dir = Path(root_dir).resolve()
+            venv_dir = root_dir / VENV
+            with create_temp_path(path=venv_dir, is_dir=True):
+                create_requirements(
+                    root_dir=root_dir,
+                    pyproject_toml_path=pyproject_toml_path,
+                    group=group,
+                    dependencies=dependencies,
+                )
+                docker_run(docker_file=docker_file, root_dir=root_dir, group=group)
+                copy_source_code(source_dir=venv_dir, build_dir=build_dir)
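
The same substring guard wraps the whole body of build_third_party, so a call for an archived package simply does nothing and returns None. An early-return formulation would be behaviourally equivalent; shown here only as a sketch, not as what the commit does:

    def build_third_party(file, pyproject_toml_path, group, dependencies):
        # Skip archived packages before doing any work.
        if "archived_epr" in file:
            return
        ...  # original packaging body, unindented, would follow here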

scripts/test/test.mk

Lines changed: 7 additions & 7 deletions
@@ -10,25 +10,25 @@ RUN_SPEEDTEST = ?= FALSE
 PROXYGEN_PRODUCT_TIMESTAMP = $(TIMESTAMP_DIR)/.proxygen-product.stamp
 
 _pytest:
-	AWS_DEFAULT_REGION=$(AWS_DEFAULT_REGION) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN) poetry run python -m pytest $(PYTEST_FLAGS) $(_INTERNAL_FLAGS) $(_CACHE_CLEAR)
+	AWS_DEFAULT_REGION=$(AWS_DEFAULT_REGION) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN) poetry run python -m pytest $(PYTEST_FLAGS) --ignore=archived_epr $(_INTERNAL_FLAGS) $(_CACHE_CLEAR)
 
 _behave:
 	AWS_DEFAULT_REGION=$(AWS_DEFAULT_REGION) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN) poetry run python -m behave src/api/tests/feature_tests $(BEHAVE_FLAGS) $(_INTERNAL_FLAGS) --no-skipped
 
 test--unit: ## Run unit (pytest) tests
-	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'unit' --ignore=archived_epr $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR)
+	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'unit' $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR)
 
 test--integration: aws--login ## Run integration (pytest) tests
-	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'integration' --ignore=archived_epr $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN)
+	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'integration' $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN)
 
 test--slow: ## Run slow (pytest) tests
-	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'slow' --ignore=archived_epr" _CACHE_CLEAR=$(_CACHE_CLEAR)
+	$(MAKE) _pytest _INTERNAL_FLAGS="-m 'slow'" _CACHE_CLEAR=$(_CACHE_CLEAR)
 
 test--s3: aws--login ## Run (pytest) tests that require s3 downloads
-	$(MAKE) _pytest _INTERNAL_FLAGS="-m 's3' --ignore=archived_epr $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN)
+	$(MAKE) _pytest _INTERNAL_FLAGS="-m 's3' $(_INTERNAL_FLAGS)" _CACHE_CLEAR=$(_CACHE_CLEAR) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN)
 
 test--smoke: aws--login ## Run end-to-end smoke tests (pytest)
-	AWS_DEFAULT_REGION=$(AWS_DEFAULT_REGION) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN) WORKSPACE=$(WORKSPACE) ACCOUNT=$(ACCOUNT) poetry run python -m pytest $(PYTEST_FLAGS) -m 'smoke' --ignore=src/layers --ignore=src/etl --ignore=archived_epr/src_old $(_CACHE_CLEAR)
+	AWS_DEFAULT_REGION=$(AWS_DEFAULT_REGION) AWS_ACCESS_KEY_ID=$(AWS_ACCESS_KEY_ID) AWS_SECRET_ACCESS_KEY=$(AWS_SECRET_ACCESS_KEY) AWS_SESSION_TOKEN=$(AWS_SESSION_TOKEN) WORKSPACE=$(WORKSPACE) ACCOUNT=$(ACCOUNT) poetry run python -m pytest $(PYTEST_FLAGS) -m 'smoke' --ignore=src/layers --ignore=src/etl --ignore=archived_epr $(_CACHE_CLEAR)
 
 test--%--rerun: ## Rerun failed integration or unit (pytest) tests
 	$(MAKE) test--$* _INTERNAL_FLAGS="--last-failed --last-failed-no-failures none" _CACHE_CLEAR=$(_CACHE_CLEAR)
@@ -42,4 +42,4 @@ test--feature--%--auto-retry: ## Autoretry of failed feature (gherkin) tests
 	$(MAKE) test--feature--$* _INTERNAL_FLAGS="--define='auto_retry=true'"
 
 test--sds--matrix: ## Run end-to-end smoke tests that check data matches betweeen cpm and ldap
-	SDS_PROD_APIKEY=$(SDS_PROD_APIKEY) SDS_DEV_APIKEY=$(SDS_DEV_APIKEY) USE_CPM_PROD=$(USE_CPM_PROD) TEST_COUNT=$(TEST_COUNT) COMPARISON_ENV=$(COMPARISON_ENV) RUN_SPEEDTEST=$(RUN_SPEEDTEST) poetry run python -m pytest $(PYTEST_FLAGS) -m 'matrix' --ignore=src/layers --ignore=src/etl $(_CACHE_CLEAR)
+	SDS_PROD_APIKEY=$(SDS_PROD_APIKEY) SDS_DEV_APIKEY=$(SDS_DEV_APIKEY) USE_CPM_PROD=$(USE_CPM_PROD) TEST_COUNT=$(TEST_COUNT) COMPARISON_ENV=$(COMPARISON_ENV) RUN_SPEEDTEST=$(RUN_SPEEDTEST) poetry run python -m pytest $(PYTEST_FLAGS) -m 'matrix' --ignore=src/layers --ignore=src/etl --ignore=archived_epr $(_CACHE_CLEAR)
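
Moving --ignore=archived_epr into the shared _pytest recipe means every marker-based target (unit, integration, slow, s3) inherits the exclusion instead of repeating it, and the smoke and matrix targets now ignore the whole archived_epr directory explicitly. The flags the shared recipe now always passes are roughly equivalent to this Python sketch ("-m unit" is just an example marker supplied by an individual target):

    import pytest

    # archived_epr is always excluded; the marker varies per make target.
    ARGS = ["--ignore=archived_epr", "-m", "unit"]

    if __name__ == "__main__":
        raise SystemExit(pytest.main(ARGS))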
