Skip to content

Commit aaab2da

Browse files
authored
Merge branch 'master' into processing-job-codeartifact-support
2 parents f5d5fe7 + 47e6288 commit aaab2da

File tree

106 files changed

+740
-551
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

106 files changed

+740
-551
lines changed

CHANGELOG.md

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,29 @@
11
# Changelog
22

3+
## v2.216.0 (2024-04-17)
4+
5+
### Features
6+
7+
* optimum 0.0.21
8+
* Add TF 2.14 Graviton Inference support
9+
* JumpStart alternative config parsing
10+
* TGI 1.4.5
11+
12+
### Bug Fixes and Other Changes
13+
14+
* chore(deps): bump black from 22.3.0 to 24.3.0 in /requirements/extras
15+
* Add back serialization for automatic speech recognition
16+
* bump apache-airflow version to 2.8.4
17+
* remove trailing slash when uploading to S3 with dataset_builder.to_csv_file
18+
* Update Collaborator Check workflow to check for users which are part of collaborator team
19+
* forward network_isolation parameter to Estimators when False
20+
* Flaky slow test
21+
* Revert "Test SM PySDK Variations"
22+
23+
### Documentation Changes
24+
25+
* Add supported task types to schema builder omission
26+
327
## v2.215.0 (2024-04-12)
428

529
### Features

VERSION

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
2.215.1.dev0
1+
2.216.1.dev0

requirements/extras/test_requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ coverage>=5.2, <6.2
99
mock==4.0.3
1010
contextlib2==21.6.0
1111
awslogs==0.14.0
12-
black==22.3.0
12+
black==24.3.0
1313
stopit==1.1.2
1414
# Update tox.ini to have correct version of airflow constraints file
15-
apache-airflow==2.8.3
15+
apache-airflow==2.8.4
1616
apache-airflow-providers-amazon==7.2.1
1717
attrs>=23.1.0,<24
1818
fabric==2.6.0

src/sagemaker/amazon/record_pb2.py

Lines changed: 8 additions & 8 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/sagemaker/automl/automl.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -930,9 +930,9 @@ def _load_config(cls, inputs, auto_ml, expand_role=True, validate_uri=True):
930930

931931
auto_ml_model_deploy_config = {}
932932
if auto_ml.auto_generate_endpoint_name is not None:
933-
auto_ml_model_deploy_config[
934-
"AutoGenerateEndpointName"
935-
] = auto_ml.auto_generate_endpoint_name
933+
auto_ml_model_deploy_config["AutoGenerateEndpointName"] = (
934+
auto_ml.auto_generate_endpoint_name
935+
)
936936
if not auto_ml.auto_generate_endpoint_name and auto_ml.endpoint_name is not None:
937937
auto_ml_model_deploy_config["EndpointName"] = auto_ml.endpoint_name
938938

@@ -1034,9 +1034,9 @@ def _prepare_auto_ml_stop_condition(
10341034
if max_candidates is not None:
10351035
stopping_condition["MaxCandidates"] = max_candidates
10361036
if max_runtime_per_training_job_in_seconds is not None:
1037-
stopping_condition[
1038-
"MaxRuntimePerTrainingJobInSeconds"
1039-
] = max_runtime_per_training_job_in_seconds
1037+
stopping_condition["MaxRuntimePerTrainingJobInSeconds"] = (
1038+
max_runtime_per_training_job_in_seconds
1039+
)
10401040
if total_job_runtime_in_seconds is not None:
10411041
stopping_condition["MaxAutoMLJobRuntimeInSeconds"] = total_job_runtime_in_seconds
10421042

src/sagemaker/automl/automlv2.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1446,9 +1446,9 @@ def _load_config(cls, inputs, auto_ml, expand_role=True):
14461446

14471447
auto_ml_model_deploy_config = {}
14481448
if auto_ml.auto_generate_endpoint_name is not None:
1449-
auto_ml_model_deploy_config[
1450-
"AutoGenerateEndpointName"
1451-
] = auto_ml.auto_generate_endpoint_name
1449+
auto_ml_model_deploy_config["AutoGenerateEndpointName"] = (
1450+
auto_ml.auto_generate_endpoint_name
1451+
)
14521452
if not auto_ml.auto_generate_endpoint_name and auto_ml.endpoint_name is not None:
14531453
auto_ml_model_deploy_config["EndpointName"] = auto_ml.endpoint_name
14541454

src/sagemaker/base_serializers.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -397,6 +397,8 @@ def serialize(self, data):
397397
raise ValueError(f"Could not open/read file: {data}. {e}")
398398
if isinstance(data, bytes):
399399
return data
400+
if isinstance(data, dict) and "data" in data:
401+
return self.serialize(data["data"])
400402

401403
raise ValueError(f"Object of type {type(data)} is not Data serializable.")
402404

src/sagemaker/collection.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -377,9 +377,11 @@ def _convert_group_resource_response(
377377
{
378378
"Name": collection_name,
379379
"Arn": collection_arn,
380-
"Type": resource_group["Identifier"]["ResourceType"]
381-
if is_model_group
382-
else "Collection",
380+
"Type": (
381+
resource_group["Identifier"]["ResourceType"]
382+
if is_model_group
383+
else "Collection"
384+
),
383385
}
384386
)
385387
return collection_details

src/sagemaker/debugger/profiler_config.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -162,19 +162,19 @@ def _to_request_dict(self):
162162
profiler_config_request["DisableProfiler"] = self.disable_profiler
163163

164164
if self.system_monitor_interval_millis is not None:
165-
profiler_config_request[
166-
"ProfilingIntervalInMilliseconds"
167-
] = self.system_monitor_interval_millis
165+
profiler_config_request["ProfilingIntervalInMilliseconds"] = (
166+
self.system_monitor_interval_millis
167+
)
168168

169169
if self.framework_profile_params is not None:
170-
profiler_config_request[
171-
"ProfilingParameters"
172-
] = self.framework_profile_params.profiling_parameters
170+
profiler_config_request["ProfilingParameters"] = (
171+
self.framework_profile_params.profiling_parameters
172+
)
173173

174174
if self.profile_params is not None:
175-
profiler_config_request[
176-
"ProfilingParameters"
177-
] = self.profile_params.profiling_parameters
175+
profiler_config_request["ProfilingParameters"] = (
176+
self.profile_params.profiling_parameters
177+
)
178178

179179
return profiler_config_request
180180

src/sagemaker/djl_inference/model.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -213,9 +213,7 @@ def _create_estimator(
213213
vpc_config: Optional[
214214
Dict[
215215
str,
216-
List[
217-
str,
218-
],
216+
List[str],
219217
]
220218
] = None,
221219
volume_kms_key=None,
@@ -820,9 +818,9 @@ def _get_container_env(self):
820818
logger.warning("Ignoring invalid container log level: %s", self.container_log_level)
821819
return self.env
822820

823-
self.env[
824-
"SERVING_OPTS"
825-
] = f'"-Dai.djl.logging.level={_LOG_LEVEL_MAP[self.container_log_level]}"'
821+
self.env["SERVING_OPTS"] = (
822+
f'"-Dai.djl.logging.level={_LOG_LEVEL_MAP[self.container_log_level]}"'
823+
)
826824
return self.env
827825

828826

0 commit comments

Comments
 (0)