
Commit 22f4ccc

refactor(tests/nixos/s3-binary-cache-store): use a PKGS dict
Replace individual PKG_A, PKG_B, and PKG_C variables with a PKGS dictionary. This will enable `@with_clean_client_store` in the future.
1 parent: b56e456
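The commit message names the motivation: once the package paths live in one PKGS mapping, a future decorator such as `@with_clean_client_store` can sweep all of them out of the client's store before a test runs. The following is only an illustrative sketch of that idea, not part of this commit: it assumes the test driver's `client` machine object and the PKGS dict introduced below, and the decorator's name and behaviour are guesses at the future work.

```python
import functools

# Hypothetical sketch -- not introduced by this commit. Assumes the NixOS
# test driver's `client` machine and the PKGS dict defined in the test script.
def with_clean_client_store(test_func):
    """Best-effort removal of every test package from the client store."""
    @functools.wraps(test_func)
    def wrapper(*args, **kwargs):
        for path in PKGS.values():
            # `nix store delete` refuses live or missing paths, so tolerate
            # failures to keep the cleanup best-effort.
            client.succeed(f"nix store delete {path} 2>/dev/null || true")
        return test_func(*args, **kwargs)
    return wrapper
```

With the paths in a single dict, such a cleanup loop needs no per-package wiring; adding a fourth package would only mean adding one entry to PKGS.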

1 file changed, 24 insertions(+), 22 deletions(-)

tests/nixos/s3-binary-cache-store.nix

@@ -83,9 +83,11 @@ in
 ENDPOINT = 'http://server:9000'
 REGION = 'eu-west-1'

-PKG_A = '${pkgA}'
-PKG_B = '${pkgB}'
-PKG_C = '${pkgC}'
+PKGS = {
+    'A': '${pkgA}',
+    'B': '${pkgB}',
+    'C': '${pkgC}',
+}

 ENV_WITH_CREDS = f"AWS_ACCESS_KEY_ID={ACCESS_KEY} AWS_SECRET_ACCESS_KEY={SECRET_KEY}"
@@ -168,7 +170,7 @@ in
     store_url = make_s3_url(bucket)
     output = server.succeed(
         f"{ENV_WITH_CREDS} nix copy --debug --to '{store_url}' "
-        f"{PKG_A} {PKG_B} {PKG_C} 2>&1"
+        f"{PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1"
     )

     assert_count(
@@ -180,7 +182,7 @@ in

     print("✓ Credential provider created once and cached")

-@with_test_bucket(populate_with=[PKG_A])
+@with_test_bucket(populate_with=[PKGS['A']])
 def test_fetchurl_basic(bucket):
     """Test builtins.fetchurl works with s3:// URLs"""
     print("\n=== Testing builtins.fetchurl ===")
@@ -216,7 +218,7 @@ in

     print("✓ Error messages format URLs correctly")

-@with_test_bucket(populate_with=[PKG_A])
+@with_test_bucket(populate_with=[PKGS['A']])
 def test_fork_credential_preresolution(bucket):
     """Test credential pre-resolution in forked processes"""
     print("\n=== Testing Fork Credential Pre-resolution ===")
@@ -296,7 +298,7 @@ in

     print(" ✓ Child uses pre-resolved credentials (no new providers)")

-@with_test_bucket(populate_with=[PKG_A, PKG_B, PKG_C])
+@with_test_bucket(populate_with=[PKGS['A'], PKGS['B'], PKGS['C']])
 def test_store_operations(bucket):
     """Test nix store info and copy operations"""
     print("\n=== Testing Store Operations ===")
@@ -316,11 +318,11 @@ in
     print(f" ✓ Store URL: {store_info['url']}")

     # Test copy from store
-    client.fail(f"nix path-info {PKG_A}")
+    client.fail(f"nix path-info {PKGS['A']}")

     output = client.succeed(
         f"{ENV_WITH_CREDS} nix copy --debug --no-check-sigs "
-        f"--from '{store_url}' {PKG_A} {PKG_B} {PKG_C} 2>&1"
+        f"--from '{store_url}' {PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1"
     )

     assert_count(
@@ -330,12 +332,12 @@ in
         "Client credential provider caching failed"
     )

-    client.succeed(f"nix path-info {PKG_A}")
+    client.succeed(f"nix path-info {PKGS['A']}")

     print(" ✓ nix copy works")
     print(" ✓ Credentials cached on client")

-@with_test_bucket(populate_with=[PKG_A])
+@with_test_bucket(populate_with=[PKGS['A']])
 def test_url_format_variations(bucket):
     """Test different S3 URL parameter combinations"""
     print("\n=== Testing URL Format Variations ===")
@@ -350,7 +352,7 @@ in
     client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{url2}' >&2")
     print(" ✓ Parameter order: endpoint before region works")

-@with_test_bucket(populate_with=[PKG_A])
+@with_test_bucket(populate_with=[PKGS['A']])
 def test_concurrent_fetches(bucket):
     """Validate thread safety with concurrent S3 operations"""
     print("\n=== Testing Concurrent Fetches ===")
@@ -418,16 +420,16 @@ in
     print("\n=== Testing Compression: narinfo (gzip) ===")

     store_url = make_s3_url(bucket, **{'narinfo-compression': 'gzip'})
-    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_B}")
+    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['B']}")

-    pkg_hash = get_package_hash(PKG_B)
+    pkg_hash = get_package_hash(PKGS['B'])
     verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "gzip")

     print(" ✓ .narinfo has Content-Encoding: gzip")

     # Verify client can download and decompress
-    client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_B}")
-    client.succeed(f"nix path-info {PKG_B}")
+    client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['B']}")
+    client.succeed(f"nix path-info {PKGS['B']}")

     print(" ✓ Client decompressed .narinfo successfully")

@@ -441,9 +443,9 @@ in
         **{'narinfo-compression': 'xz', 'write-nar-listing': 'true', 'ls-compression': 'gzip'}
     )

-    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_C}")
+    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['C']}")

-    pkg_hash = get_package_hash(PKG_C)
+    pkg_hash = get_package_hash(PKGS['C'])

     # Verify .narinfo has xz compression
     verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "xz")
@@ -454,8 +456,8 @@ in
     print(" ✓ .ls has Content-Encoding: gzip")

     # Verify client can download with mixed compression
-    client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_C}")
-    client.succeed(f"nix path-info {PKG_C}")
+    client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['C']}")
+    client.succeed(f"nix path-info {PKGS['C']}")

     print(" ✓ Client downloaded package with mixed compression")

@@ -465,9 +467,9 @@ in
     print("\n=== Testing Compression: disabled (default) ===")

     store_url = make_s3_url(bucket)
-    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_A}")
+    server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']}")

-    pkg_hash = get_package_hash(PKG_A)
+    pkg_hash = get_package_hash(PKGS['A'])
     verify_no_compression(server, bucket, f"{pkg_hash}.narinfo")

     print(" ✓ No compression applied by default")
