|
83 | 83 | ENDPOINT = 'http://server:9000' |
84 | 84 | REGION = 'eu-west-1' |
85 | 85 |
|
86 | | - PKG_A = '${pkgA}' |
87 | | - PKG_B = '${pkgB}' |
88 | | - PKG_C = '${pkgC}' |
| 86 | + PKGS = { |
| 87 | + 'A': '${pkgA}', |
| 88 | + 'B': '${pkgB}', |
| 89 | + 'C': '${pkgC}', |
| 90 | + } |
89 | 91 |
|
90 | 92 | ENV_WITH_CREDS = f"AWS_ACCESS_KEY_ID={ACCESS_KEY} AWS_SECRET_ACCESS_KEY={SECRET_KEY}" |
91 | 93 |
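
The hunk above replaces three positional constants with a single mapping, so call sites can address packages by key or iterate over all of them. A minimal sketch of the latter (a hypothetical convenience, not part of this diff):

```python
# Hypothetical helper, not part of this diff: build the argument string for
# commands that operate on every test package at once (dicts preserve
# insertion order, so this yields "<pkgA> <pkgB> <pkgC>").
ALL_PKGS = " ".join(PKGS.values())
```

With that in place, the `nix copy` invocations below could say `{ALL_PKGS}` instead of spelling out the three keys.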
|
|
168 | 170 | store_url = make_s3_url(bucket) |
169 | 171 | output = server.succeed( |
170 | 172 | f"{ENV_WITH_CREDS} nix copy --debug --to '{store_url}' " |
171 | | - f"{PKG_A} {PKG_B} {PKG_C} 2>&1" |
| 173 | + f"{PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" |
172 | 174 | ) |
173 | 175 |
|
174 | 176 | assert_count( |
|
180 | 182 |
|
181 | 183 | print("✓ Credential provider created once and cached") |
182 | 184 |
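
The arguments to `assert_count` are elided in this hunk; judging by its call sites, it counts occurrences of a debug-log marker in the captured output and fails with the caller-supplied message. A plausible shape, assuming exactly that signature:

```python
def assert_count(output: str, needle: str, expected: int, message: str) -> None:
    # Count occurrences of a debug-log marker in the captured command output
    # and fail loudly with the caller-supplied message on mismatch.
    actual = output.count(needle)
    assert actual == expected, f"{message}: expected {expected}, got {actual}"
```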
|
183 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 185 | + @with_test_bucket(populate_with=[PKGS['A']]) |
184 | 186 | def test_fetchurl_basic(bucket): |
185 | 187 | """Test builtins.fetchurl works with s3:// URLs""" |
186 | 188 | print("\n=== Testing builtins.fetchurl ===") |
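
The body of this test is elided; presumably it evaluates `builtins.fetchurl` against an object in the freshly populated bucket. A hedged sketch of what such a check could look like (the choice of key fetched here is an assumption):

```python
# Hypothetical sketch, not part of this diff: fetch the package's .narinfo
# through builtins.fetchurl using an s3:// URL with explicit endpoint/region.
pkg_hash = get_package_hash(PKGS['A'])
fetch_url = f"s3://{bucket}/{pkg_hash}.narinfo?endpoint={ENDPOINT}&region={REGION}"
client.succeed(
    f"{ENV_WITH_CREDS} nix eval --impure --raw --expr "
    f"'builtins.readFile (builtins.fetchurl \"{fetch_url}\")' >&2"
)
```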
|
216 | 218 |
|
217 | 219 | print("✓ Error messages format URLs correctly") |
218 | 220 |
|
219 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 221 | + @with_test_bucket(populate_with=[PKGS['A']]) |
220 | 222 | def test_fork_credential_preresolution(bucket): |
221 | 223 | """Test credential pre-resolution in forked processes""" |
222 | 224 | print("\n=== Testing Fork Credential Pre-resolution ===") |
|
296 | 298 |
|
297 | 299 | print(" ✓ Child uses pre-resolved credentials (no new providers)") |
298 | 300 |
|
299 | | - @with_test_bucket(populate_with=[PKG_A, PKG_B, PKG_C]) |
| 301 | + @with_test_bucket(populate_with=[PKGS['A'], PKGS['B'], PKGS['C']]) |
300 | 302 | def test_store_operations(bucket): |
301 | 303 | """Test nix store info and copy operations""" |
302 | 304 | print("\n=== Testing Store Operations ===") |
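
The `store_info` dict read a few lines below is presumably the parsed output of `nix store info --json`, which reports the store's URL among other fields. A sketch under that assumption:

```python
import json

# Hypothetical sketch: obtain the JSON store description that the assertions
# below read fields like store_info['url'] from.
store_info = json.loads(
    client.succeed(f"{ENV_WITH_CREDS} nix store info --json --store '{store_url}'")
)
```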
|
316 | 318 | print(f" ✓ Store URL: {store_info['url']}") |
317 | 319 |
|
318 | 320 | # Test copy from store |
319 | | - client.fail(f"nix path-info {PKG_A}") |
| 321 | + client.fail(f"nix path-info {PKGS['A']}") |
320 | 322 |
|
321 | 323 | output = client.succeed( |
322 | 324 | f"{ENV_WITH_CREDS} nix copy --debug --no-check-sigs " |
323 | | - f"--from '{store_url}' {PKG_A} {PKG_B} {PKG_C} 2>&1" |
| 325 | + f"--from '{store_url}' {PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" |
324 | 326 | ) |
325 | 327 |
|
326 | 328 | assert_count( |
|
330 | 332 | "Client credential provider caching failed" |
331 | 333 | ) |
332 | 334 |
|
333 | | - client.succeed(f"nix path-info {PKG_A}") |
| 335 | + client.succeed(f"nix path-info {PKGS['A']}") |
334 | 336 |
|
335 | 337 | print(" ✓ nix copy works") |
336 | 338 | print(" ✓ Credentials cached on client") |
337 | 339 |
|
338 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 340 | + @with_test_bucket(populate_with=[PKGS['A']]) |
339 | 341 | def test_url_format_variations(bucket): |
340 | 342 | """Test different S3 URL parameter combinations""" |
341 | 343 | print("\n=== Testing URL Format Variations ===") |
|
350 | 352 | client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{url2}' >&2") |
351 | 353 | print(" ✓ Parameter order: endpoint before region works") |
352 | 354 |
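
`make_s3_url` itself is outside this diff. Judging from its call sites, it renders an `s3://` store URL with `endpoint`/`region` query parameters; the `**{'narinfo-compression': 'gzip'}` spelling seen in the compression tests below exists because hyphenated parameter names are not valid Python keyword identifiers. A sketch under those assumptions:

```python
# Hypothetical reconstruction of the elided helper: extra store parameters
# are appended as query-string key/value pairs after endpoint and region.
def make_s3_url(bucket: str, **params: str) -> str:
    query = {"endpoint": ENDPOINT, "region": REGION, **params}
    return f"s3://{bucket}?" + "&".join(f"{k}={v}" for k, v in query.items())
```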
|
353 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 355 | + @with_test_bucket(populate_with=[PKGS['A']]) |
354 | 356 | def test_concurrent_fetches(bucket): |
355 | 357 | """Validate thread safety with concurrent S3 operations""" |
356 | 358 | print("\n=== Testing Concurrent Fetches ===") |
|
418 | 420 | print("\n=== Testing Compression: narinfo (gzip) ===") |
419 | 421 |
|
420 | 422 | store_url = make_s3_url(bucket, **{'narinfo-compression': 'gzip'}) |
421 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_B}") |
| 423 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['B']}") |
422 | 424 |
|
423 | | - pkg_hash = get_package_hash(PKG_B) |
| 425 | + pkg_hash = get_package_hash(PKGS['B']) |
424 | 426 | verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "gzip") |
425 | 427 |
|
426 | 428 | print(" ✓ .narinfo has Content-Encoding: gzip") |
427 | 429 |
|
428 | 430 | # Verify client can download and decompress |
429 | | - client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_B}") |
430 | | - client.succeed(f"nix path-info {PKG_B}") |
| 431 | + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['B']}") |
| 432 | + client.succeed(f"nix path-info {PKGS['B']}") |
431 | 433 |
|
432 | 434 | print(" ✓ Client decompressed .narinfo successfully") |
433 | 435 |
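
`get_package_hash` is also elided. Since the `.narinfo` object is named after the base-32 hash that prefixes a store path's basename (`/nix/store/<hash>-<name>`), a minimal version would be:

```python
# Hypothetical reconstruction: extract the <hash> part of
# /nix/store/<hash>-<name>, which names the corresponding .narinfo object.
def get_package_hash(store_path: str) -> str:
    return store_path.split("/")[-1].split("-", 1)[0]
```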
|
|
441 | 443 | **{'narinfo-compression': 'xz', 'write-nar-listing': 'true', 'ls-compression': 'gzip'} |
442 | 444 | ) |
443 | 445 |
|
444 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_C}") |
| 446 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['C']}") |
445 | 447 |
|
446 | | - pkg_hash = get_package_hash(PKG_C) |
| 448 | + pkg_hash = get_package_hash(PKGS['C']) |
447 | 449 |
|
448 | 450 | # Verify .narinfo has xz compression |
449 | 451 | verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "xz") |
|
454 | 456 | print(" ✓ .ls has Content-Encoding: gzip") |
455 | 457 |
|
456 | 458 | # Verify client can download with mixed compression |
457 | | - client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_C}") |
458 | | - client.succeed(f"nix path-info {PKG_C}") |
| 459 | + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['C']}") |
| 460 | + client.succeed(f"nix path-info {PKGS['C']}") |
459 | 461 |
|
460 | 462 | print(" ✓ Client downloaded package with mixed compression") |
461 | 463 |
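
`verify_content_encoding` (and its `verify_no_compression` counterpart in the next hunk) presumably issue a HEAD request against the MinIO endpoint and inspect the `Content-Encoding` header. This sketch assumes the object is readable without signed requests inside the test network:

```python
# Hypothetical sketch of the header check, assuming unauthenticated reads
# are permitted within the test network.
def verify_content_encoding(machine, bucket: str, key: str, expected: str) -> None:
    headers = machine.succeed(f"curl -sI '{ENDPOINT}/{bucket}/{key}'")
    assert f"Content-Encoding: {expected}" in headers, (
        f"expected Content-Encoding: {expected} for {key}, got:\n{headers}"
    )
```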
|
|
465 | 467 | print("\n=== Testing Compression: disabled (default) ===") |
466 | 468 |
|
467 | 469 | store_url = make_s3_url(bucket) |
468 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_A}") |
| 470 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']}") |
469 | 471 |
|
470 | | - pkg_hash = get_package_hash(PKG_A) |
| 472 | + pkg_hash = get_package_hash(PKGS['A']) |
471 | 473 | verify_no_compression(server, bucket, f"{pkg_hash}.narinfo") |
472 | 474 |
|
473 | 475 | print(" ✓ No compression applied by default") |
|