11 changes: 11 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,16 @@
# Changelog

+## v2.155.0 (2023-05-15)
+
+### Features
+
+* Add support for SageMaker Serverless inference Provisioned Concurrency feature
+
+### Bug Fixes and Other Changes
+
+* Revert "fix: make RemoteExecutor context manager non-blocking on pend…
+* Add BOM to no No P2 Availability region list
+
## v2.154.0 (2023-05-11)

### Features
2 changes: 1 addition & 1 deletion VERSION
@@ -1 +1 @@
-2.154.1.dev0
+2.155.1.dev0
8 changes: 6 additions & 2 deletions requirements/extras/test_requirements.txt
@@ -12,9 +12,9 @@ awslogs==0.14.0
black==22.3.0
stopit==1.1.2
# Update tox.ini to have correct version of airflow constraints file
-apache-airflow==2.5.1
+apache-airflow==2.6.0
apache-airflow-providers-amazon==7.2.1
-attrs==22.1.0
+attrs>=23.1.0,<24
fabric==2.6.0
requests==2.27.1
sagemaker-experiments==0.1.35
@@ -23,3 +23,7 @@ pyvis==0.2.1
pandas>=1.3.5,<1.5
scikit-learn==1.0.2
cloudpickle==2.2.1
+scipy==1.7.3
+urllib3==1.26.8
+docker>=5.0.2,<7.0.0
+PyYAML==6.0
6 changes: 3 additions & 3 deletions setup.py
@@ -47,7 +47,7 @@ def read_requirements(filename):

# Declare minimal set for installation
required_packages = [
"attrs>=20.3.0,<23",
"attrs>=23.1.0,<24",
"boto3>=1.26.131,<2.0",
"cloudpickle==2.2.1",
"google-pasta",
@@ -60,7 +60,7 @@
"pandas",
"pathos",
"schema",
"PyYAML==5.4.1",
"PyYAML==6.0",
"jsonschema",
"platformdirs",
"tblib==1.7.0",
@@ -75,7 +75,7 @@ def read_requirements(filename):
# Meta dependency groups
extras["all"] = [item for group in extras.values() for item in group]
# Tests specific dependencies (do not need to be included in 'all')
extras["test"] = (extras["all"] + read_requirements("requirements/extras/test_requirements.txt"),)
extras["test"] = (read_requirements("requirements/extras/test_requirements.txt"),)

setup(
name="sagemaker",
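One practical effect of the `extras["test"]` change above: `pip install "sagemaker[test]"` previously resolved to `extras["all"]` plus the contents of `test_requirements.txt`, whereas it now installs only the pinned test requirements. A simplified sketch of the packaging pattern, with placeholder group names rather than the SDK's real dependency groups:

```python
# Illustrative sketch only; not the real setup.py. "feature-processor" and its
# contents are placeholders for the SDK's optional dependency groups.
def read_requirements(filename):
    with open(filename) as f:
        return [line.strip() for line in f if line.strip() and not line.startswith("#")]

extras = {"feature-processor": ["pyspark"]}
extras["all"] = [item for group in extras.values() for item in group]

# Before: test deps were layered on top of every optional group.
# extras["test"] = extras["all"] + read_requirements("requirements/extras/test_requirements.txt")

# After: test deps come only from the pinned requirements file.
extras["test"] = read_requirements("requirements/extras/test_requirements.txt")
```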
4 changes: 3 additions & 1 deletion src/sagemaker/djl_inference/model.py
@@ -854,11 +854,13 @@ def generate_serving_properties(self, serving_properties=None) -> Dict[str, str]
if self.low_cpu_mem_usage:
serving_properties["option.low_cpu_mem_usage"] = self.low_cpu_mem_usage
# This is a workaround due to a bug in our built in handler for huggingface
-# TODO: This needs to be fixed when new dlc is published
+# TODO: Remove this logic whenever 0.20.0 image is out of service
if (
serving_properties["option.entryPoint"] == "djl_python.huggingface"
and self.dtype
and self.dtype != "auto"
+and self.djl_version
+and int(self.djl_version.split(".")[1]) < 21
):
serving_properties["option.dtype"] = "auto"
serving_properties.pop("option.load_in_8bit", None)
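The new guard ties the huggingface dtype workaround to the image version: it now applies only when `djl_version` is set and its minor version is below 21, i.e. images older than 0.21.0. A standalone sketch of just that gate (hypothetical helper, not SDK code; the entry-point check from the real condition is omitted):

```python
# Hypothetical helper mirroring the dtype/version part of the condition above:
# force dtype back to "auto" only when a non-"auto" dtype was requested and the
# DJL image is older than 0.21.0 (minor version < 21).
def needs_dtype_workaround(djl_version: str, dtype: str) -> bool:
    return bool(dtype) and dtype != "auto" and bool(djl_version) and int(djl_version.split(".")[1]) < 21

print(needs_dtype_workaround("0.20.0", "fp16"))  # True  -> dtype overridden to "auto"
print(needs_dtype_workaround("0.21.0", "fp16"))  # False -> requested dtype passed through
```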
7 changes: 6 additions & 1 deletion src/sagemaker/experiments/run.py
@@ -633,7 +633,10 @@ def _extract_run_name_from_tc_name(trial_component_name: str, experiment_name: s
Returns:
str: The name of the Run object supplied by a user.
"""
-return trial_component_name.replace("{}{}".format(experiment_name, DELIMITER), "", 1)
+# TODO: we should revert the lower casting once backend fix reaches prod
+return trial_component_name.replace(
+"{}{}".format(experiment_name.lower(), DELIMITER), "", 1
+)

@staticmethod
def _append_run_tc_label_to_tags(tags: Optional[List[Dict[str, str]]] = None) -> list:
@@ -869,6 +872,8 @@ def list_runs(
Returns:
list: A list of ``Run`` objects.
"""
+
+# all trial components retrieved by default
tc_summaries = _TrialComponent.list(
experiment_name=experiment_name,
created_before=created_before,
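In `_extract_run_name_from_tc_name` above, the run name is recovered by stripping the `<experiment_name><delimiter>` prefix from the trial component name; the added `.lower()` accounts for the backend currently lower-casing the experiment name when it builds that prefix, which is why the TODO says to revert once the backend fix reaches prod. A minimal standalone sketch, assuming the module's delimiter is `-`:

```python
# Standalone illustration, not the SDK code; DELIMITER = "-" is an assumption.
DELIMITER = "-"

def extract_run_name(trial_component_name: str, experiment_name: str) -> str:
    # Strip "<experiment_name.lower()><DELIMITER>" once from the front,
    # leaving the user-supplied run name.
    return trial_component_name.replace(
        "{}{}".format(experiment_name.lower(), DELIMITER), "", 1
    )

print(extract_run_name("my-experiment-my-run", "My-Experiment"))  # -> "my-run"
```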
@@ -145,10 +145,10 @@ def right_size(
)

if endpoint_configurations or traffic_pattern or stopping_conditions or resource_limit:
LOGGER.info("Advance Job parameters were specified. Running Advanced job...")
LOGGER.info("Advanced Job parameters were specified. Running Advanced job...")
job_type = "Advanced"
else:
LOGGER.info("Advance Job parameters were not specified. Running Default job...")
LOGGER.info("Advanced Job parameters were not specified. Running Default job...")
job_type = "Default"

self._init_sagemaker_session_if_does_not_exist()
12 changes: 9 additions & 3 deletions src/sagemaker/remote_function/client.py
@@ -301,6 +301,7 @@ def wrapper(*args, **kwargs):
s3_uri=s3_path_join(
job_settings.s3_root_uri, job.job_name, EXCEPTION_FOLDER
),
+hmac_key=job.hmac_key,
)
except ServiceError as serr:
chained_e = serr.__cause__
@@ -337,6 +338,7 @@ def wrapper(*args, **kwargs):
return serialization.deserialize_obj_from_s3(
sagemaker_session=job_settings.sagemaker_session,
s3_uri=s3_path_join(job_settings.s3_root_uri, job.job_name, RESULTS_FOLDER),
+hmac_key=job.hmac_key,
)

if job.describe()["TrainingJobStatus"] == "Stopped":
@@ -745,7 +747,7 @@ def map(self, func, *iterables):
futures = map(self.submit, itertools.repeat(func), *iterables)
return [future.result() for future in futures]

-def shutdown(self, wait=True):
+def shutdown(self):
"""Prevent more function executions to be submitted to this executor."""
with self._state_condition:
self._shutdown = True
@@ -756,15 +758,15 @@ def shutdown(self, wait=True):
self._state_condition.notify_all()

if self._workers is not None:
-self._workers.shutdown(wait)
+self._workers.shutdown(wait=True)

def __enter__(self):
"""Create an executor instance and return it"""
return self

def __exit__(self, exc_type, exc_val, exc_tb):
"""Make sure the executor instance is shutdown."""
-self.shutdown(wait=False)
+self.shutdown()
return False

@staticmethod
@@ -861,6 +863,7 @@ def from_describe_response(describe_training_job_response, sagemaker_session):
job_return = serialization.deserialize_obj_from_s3(
sagemaker_session=sagemaker_session,
s3_uri=s3_path_join(job.s3_uri, RESULTS_FOLDER),
+hmac_key=job.hmac_key,
)
except DeserializationError as e:
client_exception = e
@@ -872,6 +875,7 @@ def from_describe_response(describe_training_job_response, sagemaker_session):
job_exception = serialization.deserialize_exception_from_s3(
sagemaker_session=sagemaker_session,
s3_uri=s3_path_join(job.s3_uri, EXCEPTION_FOLDER),
+hmac_key=job.hmac_key,
)
except ServiceError as serr:
chained_e = serr.__cause__
@@ -961,6 +965,7 @@ def result(self, timeout: float = None) -> Any:
self._return = serialization.deserialize_obj_from_s3(
sagemaker_session=self._job.sagemaker_session,
s3_uri=s3_path_join(self._job.s3_uri, RESULTS_FOLDER),
+hmac_key=self._job.hmac_key,
)
self._state = _FINISHED
return self._return
@@ -969,6 +974,7 @@ def result(self, timeout: float = None) -> Any:
self._exception = serialization.deserialize_exception_from_s3(
sagemaker_session=self._job.sagemaker_session,
s3_uri=s3_path_join(self._job.s3_uri, EXCEPTION_FOLDER),
+hmac_key=self._job.hmac_key,
)
except ServiceError as serr:
chained_e = serr.__cause__
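Two themes run through this file: every `deserialize_obj_from_s3` / `deserialize_exception_from_s3` call now passes the job's `hmac_key` so deserialization can verify the payload against that key, and `shutdown()` drops its `wait` parameter and always shuts the worker pool down with `wait=True`, so leaving the context manager blocks again (matching the changelog entry reverting the non-blocking `__exit__`). A rough usage sketch of the resulting behaviour; the constructor arguments are illustrative and may not match the actual `RemoteExecutor` signature:

```python
# Sketch only: the instance_type / max_parallel_jobs values are assumptions.
from sagemaker.remote_function import RemoteExecutor

def square(x):
    return x * x

with RemoteExecutor(instance_type="ml.m5.xlarge", max_parallel_jobs=2) as executor:
    futures = [executor.submit(square, n) for n in range(4)]
# After this change, exiting the "with" block calls shutdown(), which waits for
# the worker threads (and hence the submitted jobs) to finish before returning.

results = [f.result() for f in futures]
print(results)  # [0, 1, 4, 9]
```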