|
13 | 13 | logger.setLevel(logging.DEBUG) |
14 | 14 |
|
15 | 15 |
|
@pytest.mark.parametrize("length", [-1, 1, 2, 3, 6, 9, 12])
@pytest.mark.parametrize(
    "seq",
    [
        (1, 6, 9, 4, 0, 3, 5, 7, 8, 2),
        (8, 9, 0, 4, 2, 1, 7, 5, 6, 3),
        (1, 2, 8, 9, 3, 0, 6, 5, 7, 4),
        (4, 7, 6, 3, 2, 0, 5, 8, 1, 9),
        (6, 9, 8, 3, 1, 0, 4, 2, 5, 7),
        (8, 9, 4, 6, 7, 5, 0, 2, 3, 1),
        (8, 3, 6, 2, 9, 7, 4, 1, 0, 5),
        (7, 8, 1, 3, 2, 9, 6, 0, 4, 5),
        (2, 4, 8, 9, 6, 0, 3, 1, 7, 5),
        (9, 5, 3, 2, 0, 6, 1, 8, 7, 4),
        (0, 9, 1, 3, 7, 4, 8, 6, 5, 2),
    ],
)
def test_read_one_shot(path, seq, length):
    """Seek to a sequence of offsets and read, comparing every result
    against s3fs as the reference implementation.

    Uses s3_block_size=-1 (one-shot mode), so after the first read the
    whole object is expected to live in the internal cache, and the
    cache is expected to be released once the object is closed.
    """
    s3_client = boto3.client("s3")
    path = f"{path}0.txt"
    bucket, key = wr._utils.parse_path(path)
    payload = "0123456789"
    # Fixture object: each character's value equals its offset, which makes
    # the per-offset first-byte assertion below trivial to express.
    s3_client.put_object(Body=payload, Bucket=bucket, Key=key)
    reference_fs = s3fs.S3FileSystem()
    with reference_fs.open(path, "rb") as expected:
        with open_s3_object(path, mode="rb", s3_block_size=-1, use_threads=True) as s3obj:
            for offset in seq:
                # Keep both readers positioned identically before each read.
                s3obj.seek(offset)
                expected.seek(offset)
                chunk = s3obj.read(length)
                assert chunk[0:1] == payload[offset].encode("utf-8")
                assert chunk == expected.read(length)
                logger.debug(s3obj._cache)
                # One-shot mode: the entire object should be cached.
                assert len(s3obj._cache) == s3obj._size
        # Closing the object must drop the cache.
        assert s3obj._cache == b""

16 | 53 | @pytest.mark.parametrize("use_threads", [True, False]) |
17 | 54 | @pytest.mark.parametrize("block_size", list(range(3, 10)) + [-1]) |
18 | 55 | @pytest.mark.parametrize("length", list(range(1, 10))) |
|
0 commit comments