|
83 | 83 | ENDPOINT = 'http://server:9000' |
84 | 84 | REGION = 'eu-west-1' |
85 | 85 |
|
86 | | - PKG_A = '${pkgA}' |
87 | | - PKG_B = '${pkgB}' |
88 | | - PKG_C = '${pkgC}' |
| 86 | + PKGS = { |
| 87 | + 'A': '${pkgA}', |
| 88 | + 'B': '${pkgB}', |
| 89 | + 'C': '${pkgC}', |
| 90 | + } |
89 | 91 |
|
90 | 92 | ENV_WITH_CREDS = f"AWS_ACCESS_KEY_ID={ACCESS_KEY} AWS_SECRET_ACCESS_KEY={SECRET_KEY}" |
91 | 93 |
|
101 | 103 | bucket_and_path = f"{bucket}{path}" if path else bucket |
102 | 104 | return f"s3://{bucket_and_path}?{query}" |
103 | 105 |
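Only the tail of make_s3_url is visible in this hunk; the query string is assembled above it. Assuming it carries the ENDPOINT and REGION constants defined earlier plus any extra keyword arguments, calls would produce URLs roughly of this shape (illustrative, not part of the change):

    make_s3_url(bucket)                 # -> s3://<bucket>?endpoint=http://server:9000&region=eu-west-1  (assumed default query)
    make_s3_url(bucket, path="/foo")    # -> s3://<bucket>/foo?<same query>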
|
104 | | - def make_http_url(path): |
105 | | - """Build HTTP URL for direct S3 access""" |
106 | | - return f"{ENDPOINT}/{path}" |
107 | | -
|
108 | 106 | def get_package_hash(pkg_path): |
109 | 107 | """Extract store hash from package path""" |
110 | 108 | return pkg_path.split("/")[-1].split("-")[0] |
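In other words, get_package_hash takes the basename of a store path up to the first dash, which is the stem later used to address .narinfo objects in the bucket. For example (hash shortened for readability):

    get_package_hash("/nix/store/abc123xyz-hello-2.12")   # -> "abc123xyz"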
|
133 | 131 | print(output) |
134 | 132 | raise Exception(f"{error_msg}: expected {expected}, got {actual}") |
135 | 133 |
|
136 | | - def with_test_bucket(populate_with=[]): |
| 134 | + def verify_packages_in_store(machine, pkg_paths, should_exist=True): |
| 135 | + """ |
| 136 | + Verify whether packages exist in the store. |
| 137 | +
|
| 138 | + Args: |
| 139 | + machine: The machine to check on |
| 140 | + pkg_paths: List of package paths to check (or single path) |
| 141 | + should_exist: If True, verify packages exist; if False, verify they don't |
| 142 | + """ |
| 143 | + paths = [pkg_paths] if isinstance(pkg_paths, str) else pkg_paths |
| 144 | + for pkg in paths: |
| 145 | + if should_exist: |
| 146 | + machine.succeed(f"nix path-info {pkg}") |
| 147 | + else: |
| 148 | + machine.fail(f"nix path-info {pkg}") |
| 149 | +
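Because the helper normalises its argument to a list, the tests below can pass either a single path or several, e.g.:

    verify_packages_in_store(client, PKGS['A'], should_exist=False)    # single path
    verify_packages_in_store(client, [PKGS['A'], PKGS['B']])           # list of paths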
|
| 150 | + def setup_s3(populate_bucket=[], public=False): |
137 | 151 | """ |
138 | 152 | Decorator that creates/destroys a unique bucket for each test. |
139 | 153 | Optionally pre-populates bucket with specified packages. |
| 154 | + Cleans up client store after test completion. |
140 | 155 |
|
141 | 156 | Args: |
142 | | - populate_with: List of packages to upload before test runs |
| 157 | + populate_bucket: List of packages to upload before test runs |
| 158 | + public: If True, make the bucket publicly accessible |
143 | 159 | """ |
144 | 160 | def decorator(test_func): |
145 | 161 | def wrapper(): |
146 | 162 | bucket = str(uuid.uuid4()) |
147 | 163 | server.succeed(f"mc mb minio/{bucket}") |
| 164 | + if public: |
| 165 | + server.succeed(f"mc anonymous set download minio/{bucket}") |
148 | 166 | try: |
149 | | - if populate_with: |
| 167 | + if populate_bucket: |
150 | 168 | store_url = make_s3_url(bucket) |
151 | | - for pkg in populate_with: |
| 169 | + for pkg in populate_bucket: |
152 | 170 | server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {pkg}") |
153 | 171 | test_func(bucket) |
154 | 172 | finally: |
155 | 173 | server.succeed(f"mc rb --force minio/{bucket}") |
| 174 | + # Clean up client store - only delete if path exists |
| 175 | + for pkg in PKGS.values(): |
| 176 | + client.succeed(f"[ ! -e {pkg} ] || nix store delete --ignore-liveness {pkg}") |
156 | 177 | return wrapper |
157 | 178 | return decorator |
158 | 179 |
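Taken together, the decorator gives every test an isolated (optionally public, optionally pre-populated) bucket and guarantees both the bucket and the client store are cleaned up afterwards. A minimal sketch of the resulting pattern, with a hypothetical test name and body:

    @setup_s3(populate_bucket=[PKGS['A']], public=True)
    def test_example(bucket):
        # 'bucket' is the freshly created MinIO bucket (a UUID); it is removed
        # and the client store purged when the test returns or raises.
        store_url = make_s3_url(bucket)
        client.succeed(f"nix store info --store '{store_url}' >&2")

    test_example()   # runs setup, the test body, then teardown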
|
159 | 180 | # ============================================================================ |
160 | 181 | # Test Functions |
161 | 182 | # ============================================================================ |
162 | 183 |
|
163 | | - @with_test_bucket() |
| 184 | + @setup_s3() |
164 | 185 | def test_credential_caching(bucket): |
165 | 186 | """Verify credential providers are cached and reused""" |
166 | 187 | print("\n=== Testing Credential Caching ===") |
167 | 188 |
|
168 | 189 | store_url = make_s3_url(bucket) |
169 | 190 | output = server.succeed( |
170 | 191 | f"{ENV_WITH_CREDS} nix copy --debug --to '{store_url}' " |
171 | | - f"{PKG_A} {PKG_B} {PKG_C} 2>&1" |
| 192 | + f"{PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" |
172 | 193 | ) |
173 | 194 |
|
174 | 195 | assert_count( |
|
180 | 201 |
|
181 | 202 | print("✓ Credential provider created once and cached") |
182 | 203 |
|
183 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 204 | + @setup_s3(populate_bucket=[PKGS['A']]) |
184 | 205 | def test_fetchurl_basic(bucket): |
185 | 206 | """Test builtins.fetchurl works with s3:// URLs""" |
186 | 207 | print("\n=== Testing builtins.fetchurl ===") |
|
196 | 217 |
|
197 | 218 | print("✓ builtins.fetchurl works with s3:// URLs") |
198 | 219 |
|
199 | | - @with_test_bucket() |
| 220 | + @setup_s3() |
200 | 221 | def test_error_message_formatting(bucket): |
201 | 222 | """Verify error messages display URLs correctly""" |
202 | 223 | print("\n=== Testing Error Message Formatting ===") |
203 | 224 |
|
204 | 225 | nonexistent_url = make_s3_url(bucket, path="/foo-that-does-not-exist") |
205 | | - expected_http_url = make_http_url(f"{bucket}/foo-that-does-not-exist") |
| 226 | + expected_http_url = f"{ENDPOINT}/{bucket}/foo-that-does-not-exist" |
206 | 227 |
|
207 | 228 | error_msg = client.fail( |
208 | 229 | f"{ENV_WITH_CREDS} nix eval --impure --expr " |
|
216 | 237 |
|
217 | 238 | print("✓ Error messages format URLs correctly") |
218 | 239 |
|
219 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 240 | + @setup_s3(populate_bucket=[PKGS['A']]) |
220 | 241 | def test_fork_credential_preresolution(bucket): |
221 | 242 | """Test credential pre-resolution in forked processes""" |
222 | 243 | print("\n=== Testing Fork Credential Pre-resolution ===") |
|
246 | 267 | """.format(id=test_id, url=test_url, hash=cache_info_hash) |
247 | 268 |
|
248 | 269 | output = client.succeed( |
249 | | - f"{ENV_WITH_CREDS} nix build --debug --impure --expr '{fetchurl_expr}' 2>&1" |
| 270 | + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link --expr '{fetchurl_expr}' 2>&1" |
250 | 271 | ) |
251 | 272 |
|
252 | 273 | # Verify fork behavior |
|
296 | 317 |
|
297 | 318 | print(" ✓ Child uses pre-resolved credentials (no new providers)") |
298 | 319 |
|
299 | | - @with_test_bucket(populate_with=[PKG_A, PKG_B, PKG_C]) |
| 320 | + @setup_s3(populate_bucket=[PKGS['A'], PKGS['B'], PKGS['C']]) |
300 | 321 | def test_store_operations(bucket): |
301 | 322 | """Test nix store info and copy operations""" |
302 | 323 | print("\n=== Testing Store Operations ===") |
|
316 | 337 | print(f" ✓ Store URL: {store_info['url']}") |
317 | 338 |
|
318 | 339 | # Test copy from store |
319 | | - client.fail(f"nix path-info {PKG_A}") |
| 340 | + verify_packages_in_store(client, PKGS['A'], should_exist=False) |
320 | 341 |
|
321 | 342 | output = client.succeed( |
322 | 343 | f"{ENV_WITH_CREDS} nix copy --debug --no-check-sigs " |
323 | | - f"--from '{store_url}' {PKG_A} {PKG_B} {PKG_C} 2>&1" |
| 344 | + f"--from '{store_url}' {PKGS['A']} {PKGS['B']} {PKGS['C']} 2>&1" |
324 | 345 | ) |
325 | 346 |
|
326 | 347 | assert_count( |
|
330 | 351 | "Client credential provider caching failed" |
331 | 352 | ) |
332 | 353 |
|
333 | | - client.succeed(f"nix path-info {PKG_A}") |
| 354 | + verify_packages_in_store(client, [PKGS['A'], PKGS['B'], PKGS['C']]) |
334 | 355 |
|
335 | 356 | print(" ✓ nix copy works") |
336 | 357 | print(" ✓ Credentials cached on client") |
337 | 358 |
|
338 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 359 | + @setup_s3(populate_bucket=[PKGS['A'], PKGS['B']], public=True) |
| 360 | + def test_public_bucket_operations(bucket): |
| 361 | + """Test store operations on a public bucket without credentials"""
| 362 | + print("\n=== Testing Public Bucket Operations ===") |
| 363 | +
|
| 364 | + store_url = make_s3_url(bucket) |
| 365 | +
|
| 366 | + # Verify store info works without credentials |
| 367 | + client.succeed(f"nix store info --store '{store_url}' >&2") |
| 368 | + print(" ✓ nix store info works without credentials") |
| 369 | +
|
| 370 | + # Get and validate store info JSON |
| 371 | + info_json = client.succeed(f"nix store info --json --store '{store_url}'") |
| 372 | + store_info = json.loads(info_json) |
| 373 | +
|
| 374 | + if not store_info.get("url"): |
| 375 | + raise Exception("Store should have a URL") |
| 376 | +
|
| 377 | + print(f" ✓ Store URL: {store_info['url']}") |
| 378 | +
|
| 379 | + # Verify packages are not yet in client store |
| 380 | + verify_packages_in_store(client, [PKGS['A'], PKGS['B']], should_exist=False) |
| 381 | +
|
| 382 | + # Test copy from public bucket without credentials |
| 383 | + client.succeed( |
| 384 | + f"nix copy --debug --no-check-sigs " |
| 385 | + f"--from '{store_url}' {PKGS['A']} {PKGS['B']} 2>&1" |
| 386 | + ) |
| 387 | +
|
| 388 | + # Verify packages were copied successfully |
| 389 | + verify_packages_in_store(client, [PKGS['A'], PKGS['B']]) |
| 390 | +
|
| 391 | + print(" ✓ nix copy from public bucket works without credentials") |
| 392 | +
|
| 393 | + @setup_s3(populate_bucket=[PKGS['A']]) |
339 | 394 | def test_url_format_variations(bucket): |
340 | 395 | """Test different S3 URL parameter combinations""" |
341 | 396 | print("\n=== Testing URL Format Variations ===") |
|
350 | 405 | client.succeed(f"{ENV_WITH_CREDS} nix store info --store '{url2}' >&2") |
351 | 406 | print(" ✓ Parameter order: endpoint before region works") |
352 | 407 |
|
353 | | - @with_test_bucket(populate_with=[PKG_A]) |
| 408 | + @setup_s3(populate_bucket=[PKGS['A']]) |
354 | 409 | def test_concurrent_fetches(bucket): |
355 | 410 | """Validate thread safety with concurrent S3 operations""" |
356 | 411 | print("\n=== Testing Concurrent Fetches ===") |
|
386 | 441 |
|
387 | 442 | try: |
388 | 443 | output = client.succeed( |
389 | | - f"{ENV_WITH_CREDS} nix build --debug --impure " |
| 444 | + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link " |
390 | 445 | f"--expr '{concurrent_expr}' --max-jobs 5 2>&1" |
391 | 446 | ) |
392 | 447 | except: |
393 | 448 | output = client.fail( |
394 | | - f"{ENV_WITH_CREDS} nix build --debug --impure " |
| 449 | + f"{ENV_WITH_CREDS} nix build --debug --impure --no-link " |
395 | 450 | f"--expr '{concurrent_expr}' --max-jobs 5 2>&1" |
396 | 451 | ) |
397 | 452 |
|
|
412 | 467 | f"Expected 5 FileTransfer instances for 5 concurrent fetches, got {transfers_created}" |
413 | 468 | ) |
414 | 469 |
|
415 | | - @with_test_bucket() |
| 470 | + if providers_created != 1: |
| 471 | + print("Debug output:") |
| 472 | + print(output) |
| 473 | + raise Exception( |
| 474 | + f"Expected 1 credential provider for concurrent fetches, got {providers_created}" |
| 475 | + ) |
| 476 | +
|
| 477 | + @setup_s3() |
416 | 478 | def test_compression_narinfo_gzip(bucket): |
417 | 479 | """Test narinfo compression with gzip""" |
418 | 480 | print("\n=== Testing Compression: narinfo (gzip) ===") |
419 | 481 |
|
420 | 482 | store_url = make_s3_url(bucket, **{'narinfo-compression': 'gzip'}) |
421 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_B}") |
| 483 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['B']}") |
422 | 484 |
|
423 | | - pkg_hash = get_package_hash(PKG_B) |
| 485 | + pkg_hash = get_package_hash(PKGS['B']) |
424 | 486 | verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "gzip") |
425 | 487 |
|
426 | 488 | print(" ✓ .narinfo has Content-Encoding: gzip") |
427 | 489 |
|
428 | 490 | # Verify client can download and decompress |
429 | | - client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_B}") |
430 | | - client.succeed(f"nix path-info {PKG_B}") |
| 491 | + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['B']}") |
| 492 | + verify_packages_in_store(client, PKGS['B']) |
431 | 493 |
|
432 | 494 | print(" ✓ Client decompressed .narinfo successfully") |
433 | 495 |
|
434 | | - @with_test_bucket() |
| 496 | + @setup_s3() |
435 | 497 | def test_compression_mixed(bucket): |
436 | 498 | """Test mixed compression (narinfo=xz, ls=gzip)""" |
437 | 499 | print("\n=== Testing Compression: mixed (narinfo=xz, ls=gzip) ===") |
|
441 | 503 | **{'narinfo-compression': 'xz', 'write-nar-listing': 'true', 'ls-compression': 'gzip'} |
442 | 504 | ) |
443 | 505 |
|
444 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_C}") |
| 506 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['C']}") |
445 | 507 |
|
446 | | - pkg_hash = get_package_hash(PKG_C) |
| 508 | + pkg_hash = get_package_hash(PKGS['C']) |
447 | 509 |
|
448 | 510 | # Verify .narinfo has xz compression |
449 | 511 | verify_content_encoding(server, bucket, f"{pkg_hash}.narinfo", "xz") |
|
454 | 516 | print(" ✓ .ls has Content-Encoding: gzip") |
455 | 517 |
|
456 | 518 | # Verify client can download with mixed compression |
457 | | - client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKG_C}") |
458 | | - client.succeed(f"nix path-info {PKG_C}") |
| 519 | + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' --no-check-sigs {PKGS['C']}") |
| 520 | + verify_packages_in_store(client, PKGS['C']) |
459 | 521 |
|
460 | 522 | print(" ✓ Client downloaded package with mixed compression") |
461 | 523 |
|
462 | | - @with_test_bucket() |
| 524 | + @setup_s3() |
463 | 525 | def test_compression_disabled(bucket): |
464 | 526 | """Verify no compression by default""" |
465 | 527 | print("\n=== Testing Compression: disabled (default) ===") |
466 | 528 |
|
467 | 529 | store_url = make_s3_url(bucket) |
468 | | - server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKG_A}") |
| 530 | + server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']}") |
469 | 531 |
|
470 | | - pkg_hash = get_package_hash(PKG_A) |
| 532 | + pkg_hash = get_package_hash(PKGS['A']) |
471 | 533 | verify_no_compression(server, bucket, f"{pkg_hash}.narinfo") |
472 | 534 |
|
473 | 535 | print(" ✓ No compression applied by default") |
|
494 | 556 | test_error_message_formatting() |
495 | 557 | test_fork_credential_preresolution() |
496 | 558 | test_store_operations() |
| 559 | + test_public_bucket_operations() |
497 | 560 | test_url_format_variations() |
498 | 561 | test_concurrent_fetches() |
499 | 562 | test_compression_narinfo_gzip() |
|