Skip to content

Commit 06da755

Browse files
Merge branch 'develop' of github.com:NHSDigital/nhs-aws-helpers into feature/dh-MESH-2092-deps
2 parents 077d2d3 + fcb557d commit 06da755

22 files changed

+1110
-1061
lines changed

.github/workflows/merge-develop.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ jobs:
3434
with:
3535
path: |
3636
.venv
37-
key: ${{ runner.os }}-poetry-v2-py3.8-${{ hashFiles('./poetry.lock') }}
37+
key: ${{ runner.os }}-poetry-v2-py3.9-${{ hashFiles('./poetry.lock') }}
3838

3939
- name: git reset
4040
run: git reset --hard

.github/workflows/pull-request.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ jobs:
1212
tox:
1313
strategy:
1414
matrix:
15-
python-version: ["3.8", "3.9", "3.10", "3.11"]
15+
python-version: ["3.9", "3.10", "3.11"]
1616

1717
runs-on: ubuntu-latest
1818
if: github.repository == 'NHSDigital/nhs-aws-helpers'
@@ -114,7 +114,7 @@ jobs:
114114
with:
115115
path: |
116116
.venv
117-
key: ${{ runner.os }}-poetry-v2-py3.8-${{ hashFiles('./poetry.lock') }}
117+
key: ${{ runner.os }}-poetry-v2-py3.9-${{ hashFiles('./poetry.lock') }}
118118

119119
- name: git reset
120120
run: git reset --hard
@@ -171,7 +171,7 @@ jobs:
171171

172172
- name: archive reports
173173
if: success() || failure()
174-
uses: actions/upload-artifact@v3
174+
uses: actions/upload-artifact@v4
175175
with:
176176
name: reports
177177
path: reports/**/*
@@ -231,7 +231,7 @@ jobs:
231231
with:
232232
path: |
233233
.venv
234-
key: ${{ runner.os }}-poetry-v2-py3.8-${{ hashFiles('./poetry.lock') }}
234+
key: ${{ runner.os }}-poetry-v2-py3.9-${{ hashFiles('./poetry.lock') }}
235235

236236
- name: git reset
237237
run: git reset --hard

.tool-versions

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
11
poetry 1.8.5
2-
python 3.10.12 3.8.12 3.9.12 3.11.5
2+
python 3.10.12 3.9.12 3.11.5

.vscode/settings.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
"editor.formatOnSave": true,
33
"[python]": {
44
"editor.codeActionsOnSave": {
5-
"source.organizeImports": true
5+
"source.organizeImports": "explicit"
66
}
77
},
88
"isort.args": [

nhs_aws_helpers/__init__.py

Lines changed: 22 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -7,22 +7,15 @@
77
import random
88
import re
99
import time
10+
from collections.abc import AsyncGenerator, Generator, Iterable, Sequence
1011
from concurrent.futures import ThreadPoolExecutor, as_completed
1112
from functools import partial, reduce, wraps
1213
from typing import (
1314
IO,
1415
Any,
15-
AsyncGenerator,
1616
Callable,
17-
Dict,
18-
Generator,
19-
Iterable,
20-
List,
2117
Literal,
2218
Optional,
23-
Sequence,
24-
Tuple,
25-
Type,
2619
TypeVar,
2720
Union,
2821
cast,
@@ -86,15 +79,15 @@ def s3_build_uri(bucket: str, key: str) -> str:
8679
return f"s3://{bucket}/{key}"
8780

8881

89-
def s3_split_path(s3uri: str) -> Tuple[str, str, str]:
82+
def s3_split_path(s3uri: str) -> tuple[str, str, str]:
9083
match = s3re.match(s3uri)
9184
if not match:
9285
raise ValueError(f"Not a s3 uri: {s3uri}")
9386
scheme, bucket, key = match.groups()
9487
return scheme, bucket, key
9588

9689

97-
_default_configs: Dict[str, Config] = {}
90+
_default_configs: dict[str, Config] = {}
9891

9992
_pre_configure: Optional[Callable[[str, str, Optional[Config]], Optional[Config]]] = None
10093

@@ -322,9 +315,9 @@ def secret_binary_value(name: str, session: Optional[Session] = None, config: Op
322315

323316
def ssm_parameter(
324317
name: str, decrypt=False, session: Optional[Session] = None, config: Optional[Config] = None
325-
) -> Union[str, List[str]]:
318+
) -> Union[str, list[str]]:
326319
ssm = ssm_client(session=session, config=config)
327-
value = cast(Union[str, List[str]], ssm.get_parameter(Name=name, WithDecryption=decrypt)["Parameter"]["Value"])
320+
value = cast(Union[str, list[str]], ssm.get_parameter(Name=name, WithDecryption=decrypt)["Parameter"]["Value"])
328321
return value
329322

330323

@@ -359,7 +352,7 @@ def s3_get_tags(
359352
session: Optional[Session] = None,
360353
config: Optional[Config] = None,
361354
client: Optional[S3Client] = None,
362-
) -> Dict[str, str]:
355+
) -> dict[str, str]:
363356
client = client or s3_resource(session=session, config=config).meta.client
364357
result = client.get_object_tagging(Bucket=bucket, Key=key)
365358
tags = {pair["Key"]: pair["Value"] for pair in result["TagSet"]}
@@ -369,7 +362,7 @@ def s3_get_tags(
369362
def s3_replace_tags(
370363
bucket: str,
371364
key: str,
372-
tags: Dict[str, str],
365+
tags: dict[str, str],
373366
session: Optional[Session] = None,
374367
config: Optional[Config] = None,
375368
client: Optional[S3Client] = None,
@@ -384,11 +377,11 @@ def s3_replace_tags(
384377
def s3_update_tags(
385378
bucket: str,
386379
key: str,
387-
tags: Dict[str, str],
380+
tags: dict[str, str],
388381
session: Optional[Session] = None,
389382
config: Optional[Config] = None,
390383
client: Optional[S3Client] = None,
391-
) -> Dict[str, str]:
384+
) -> dict[str, str]:
392385
client = client or s3_resource(session=session, config=config).meta.client
393386

394387
result = client.get_object_tagging(Bucket=bucket, Key=key)
@@ -407,7 +400,7 @@ def s3_get_all_keys(
407400
session: Optional[Session] = None,
408401
config: Optional[Config] = None,
409402
client: Optional[S3Client] = None,
410-
) -> List[str]:
403+
) -> list[str]:
411404
client = client or s3_resource(session=session, config=config).meta.client
412405
paginator = client.get_paginator("list_objects_v2")
413406
page_iterator = paginator.paginate(Bucket=bucket, Prefix=prefix)
@@ -442,7 +435,7 @@ def s3_delete_keys(
442435

443436

444437
def s3_delete_versioned_keys(
445-
keys: Iterable[Tuple[str, str]], bucket: str, session: Optional[Session] = None, config: Optional[Config] = None
438+
keys: Iterable[tuple[str, str]], bucket: str, session: Optional[Session] = None, config: Optional[Config] = None
446439
):
447440
# delete specific versions, rather than deleting "objects" and adding delete_marker
448441
buck = s3_bucket(bucket, session=session, config=config)
@@ -512,7 +505,8 @@ def filter_items(items: Sequence[Union[DeleteMarkerEntryTypeDef, ObjectVersionTy
512505

513506
for i in range(0, len(version_list), 1000):
514507
response = client.delete_objects(
515-
Bucket=bucket, Delete={"Objects": version_list[i : i + 1000], "Quiet": True} # type: ignore[typeddict-item]
508+
Bucket=bucket,
509+
Delete={"Objects": version_list[i : i + 1000], "Quiet": True}, # type: ignore[typeddict-item]
516510
)
517511
print(response)
518512

@@ -608,7 +602,7 @@ def s3_put(bucket: Bucket, key: str, body: Union[bytes, str], encoding: str = "u
608602
return obj
609603

610604

611-
def s3_list_prefixes(s3_path: str, session: Optional[Session] = None, config: Optional[Config] = None) -> List[str]:
605+
def s3_list_prefixes(s3_path: str, session: Optional[Session] = None, config: Optional[Config] = None) -> list[str]:
612606
_na, bucket, prefix = s3_split_path(s3_path)
613607

614608
if not prefix.endswith("/"):
@@ -619,7 +613,7 @@ def s3_list_prefixes(s3_path: str, session: Optional[Session] = None, config: Op
619613
return [o["Prefix"].replace(prefix, "").strip("/") for o in result.get("CommonPrefixes", [])]
620614

621615

622-
def s3_list_folders(bucket_name: str, bucket_prefix: str, page_size: int = 100) -> List[str]:
616+
def s3_list_folders(bucket_name: str, bucket_prefix: str, page_size: int = 100) -> list[str]:
623617
paginator = s3_client().get_paginator("list_objects")
624618
folders = []
625619
iterator = paginator.paginate(
@@ -656,7 +650,7 @@ def s3_get_size(
656650

657651

658652
def s3_upload_multipart_from_copy(
659-
obj: Object, parts_keys: Sequence[str], executor_type: Type[ThreadPoolExecutor] = ThreadPoolExecutor, **kwargs
653+
obj: Object, parts_keys: Sequence[str], executor_type: type[ThreadPoolExecutor] = ThreadPoolExecutor, **kwargs
660654
):
661655
multipart_upload = obj.initiate_multipart_upload(**kwargs)
662656

@@ -889,7 +883,7 @@ async def _async_wrapper(*args, **kwargs):
889883
@dynamodb_retry_backoff()
890884
def get_items_batched(
891885
ddb_table_name: str,
892-
keys: List[Dict[str, Any]],
886+
keys: list[dict[str, Any]],
893887
client: Optional[DynamoDBClient] = None,
894888
session: Optional[Session] = None,
895889
config: Optional[Config] = None,
@@ -905,12 +899,12 @@ def get_items_batched(
905899

906900
def ddb_get_items(
907901
ddb_table_name: str,
908-
keys: List[Dict[str, Any]],
902+
keys: list[dict[str, Any]],
909903
client: Optional[DynamoDBClient] = None,
910904
session: Optional[Session] = None,
911905
config: Optional[Config] = None,
912906
**kwargs,
913-
) -> List[Dict[str, Any]]:
907+
) -> list[dict[str, Any]]:
914908
client = client or cast(DynamoDBClient, dynamodb(session=session, config=config).meta.client)
915909
result = []
916910
remaining = keys
@@ -1106,7 +1100,7 @@ async def async_stream_from_s3(
11061100
_RE_CANCELLATION_REASONS = re.compile(r"^.+\[(\w+[^\]]*?)\]$")
11071101

11081102

1109-
def transaction_cancellation_reasons(err: ClientError) -> List[str]:
1103+
def transaction_cancellation_reasons(err: ClientError) -> list[str]:
11101104
"""
11111105
get cancellation reasons as strings .. e.g. ['None', 'ConditionalCheckFailed', 'None']
11121106
"""
@@ -1121,7 +1115,7 @@ def transaction_cancellation_reasons(err: ClientError) -> List[str]:
11211115
return [reason.strip() for reason in match.group(1).split(",")]
11221116

11231117

1124-
def transaction_cancelled_for_only_reasons(err: ClientError, *match_reason: str) -> List[str]:
1118+
def transaction_cancelled_for_only_reasons(err: ClientError, *match_reason: str) -> list[str]:
11251119
"""
11261120
returns all reasons ... if all reasons either match match_reason or 'None'
11271121
"""
@@ -1130,5 +1124,5 @@ def transaction_cancelled_for_only_reasons(err: ClientError, *match_reason: str)
11301124
return reasons if all(reason in match_reasons for reason in reasons) else []
11311125

11321126

1133-
def cancellation_reasons_if_conditional_check(err: ClientError) -> List[str]:
1127+
def cancellation_reasons_if_conditional_check(err: ClientError) -> list[str]:
11341128
return transaction_cancelled_for_only_reasons(err, "ConditionalCheckFailed")

nhs_aws_helpers/async_s3_object_reader.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ async def read(self, num: Optional[int] = None) -> Union[str, bytes]:
123123
body = await self._maybe_get_body()
124124

125125
if num is None:
126-
chunk = body.read()
126+
chunk = cast(Union[str, bytes], body.read())
127127
if self._encoding:
128128
chunk = self.ensure_valid_chunk(chunk)
129129
else:

nhs_aws_helpers/async_s3_object_writer.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import asyncio
22
from asyncio import Task
3-
from typing import Any, Final, Iterator, List, Mapping, Optional, Union
3+
from collections.abc import Iterator, Mapping
4+
from typing import Any, Final, Optional, Union
45

56
from mypy_boto3_s3.service_resource import MultipartUpload, Object
67

@@ -28,8 +29,8 @@ def __init__(
2829
self._encoding = encoding
2930
self._buffer = b""
3031
self._bytes_written, self._position = (0, 0)
31-
self._upload_tasks: List[Task] = []
32-
self._upload_errors: List[Exception] = []
32+
self._upload_tasks: list[Task] = []
33+
self._upload_errors: list[Exception] = []
3334
self._buffer_size = buffer_size
3435
self._multipart_upload: Optional[MultipartUpload] = None
3536
self._name = name
@@ -95,7 +96,7 @@ def read(self, num_bytes: int = ...) -> Union[str, bytes]:
9596
def readline(self, limit: int = ...) -> Union[str, bytes]:
9697
raise NotImplementedError
9798

98-
def readlines(self, hint: int = ...) -> List[Union[str, bytes]]:
99+
def readlines(self, hint: int = ...) -> list[Union[str, bytes]]:
99100
raise NotImplementedError
100101

101102
def seek(self, offset: int, whence: int = ...) -> int:
@@ -107,7 +108,7 @@ def __next__(self) -> Union[str, bytes]:
107108
def __iter__(self) -> Iterator[Union[str, bytes]]:
108109
raise NotImplementedError
109110

110-
async def writelines(self, lines: List[Union[str, bytes]]) -> None:
111+
async def writelines(self, lines: list[Union[str, bytes]]) -> None:
111112
for arg in lines:
112113
await self.write(arg)
113114

nhs_aws_helpers/dynamodb_model_store/base_model.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
11
import inspect
2+
from collections.abc import Mapping
23
from dataclasses import fields, is_dataclass
34
from functools import lru_cache
4-
from typing import Any, Dict, Generic, List, Mapping, Tuple, Type, TypeVar, cast
5+
from typing import Any, Generic, TypeVar, cast
56

67
TModelKey = TypeVar("TModelKey", bound=Mapping[str, Any])
78

@@ -11,7 +12,7 @@ class serialised_property(property):
1112

1213

1314
class BaseModel(Generic[TModelKey]):
14-
_model_key_type: Type[TModelKey]
15+
_model_key_type: type[TModelKey]
1516

1617
@serialised_property
1718
def model_type(self) -> str:
@@ -21,7 +22,7 @@ def model_key(self) -> TModelKey:
2122
return cast(TModelKey, {k: getattr(self, k) for k in self.model_key_fields()})
2223

2324
@classmethod
24-
def model_key_fields(cls) -> List[str]:
25+
def model_key_fields(cls) -> list[str]:
2526
model_key_fields = _MODEL_KEY_FIELDS.get(cls._model_key_type)
2627
if not model_key_fields:
2728
model_key_fields = list(cls._model_key_type.__annotations__.keys())
@@ -31,16 +32,16 @@ def model_key_fields(cls) -> List[str]:
3132
return model_key_fields
3233

3334
@classmethod
34-
def model_key_from_item(cls, item: Dict[str, Any]) -> TModelKey:
35+
def model_key_from_item(cls, item: dict[str, Any]) -> TModelKey:
3536
return cast(TModelKey, {k: item.get(k) for k in cls.model_key_fields()})
3637

3738

38-
_MODEL_KEY_FIELDS: Dict[type, List[str]] = {}
39+
_MODEL_KEY_FIELDS: dict[type, list[str]] = {}
3940

4041

4142
@lru_cache
42-
def model_properties_cache(model_type: Type[BaseModel]) -> List[Tuple[str, type, Mapping[str, Any]]]:
43-
model_fields: List[Tuple[str, type, Mapping[str, Any]]] = []
43+
def model_properties_cache(model_type: type[BaseModel]) -> list[tuple[str, type, Mapping[str, Any]]]:
44+
model_fields: list[tuple[str, type, Mapping[str, Any]]] = []
4445

4546
if is_dataclass(model_type):
4647
model_fields.extend([(field.name, field.type, field.metadata) for field in fields(model_type)])

0 commit comments

Comments (0)