Merged
src/sagemaker/estimator.py (1 addition, 1 deletion)

@@ -2119,7 +2119,7 @@ def _get_instance_type(self):
             instance_type = instance_group.instance_type
             if is_pipeline_variable(instance_type):
                 continue
-            match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+            match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)

             if match:
                 family = match[1]
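The change is the same in every file: the family-capturing group [a-z\d]+ is widened to [a-z\d\-]+, so instance types whose family name contains a hyphen (for example ml.p6-b200.48xlarge) now parse instead of failing the match. A quick standalone comparison of the two patterns (illustrative only, not part of the PR):

import re

OLD = r"^ml[\._]([a-z\d]+)\.?\w*$"
NEW = r"^ml[\._]([a-z\d\-]+)\.?\w*$"

# The old character class cannot consume the "-", so hyphenated families never match.
print(re.match(OLD, "ml.p6-b200.48xlarge"))     # None
# The widened class captures the full family, hyphen included.
print(re.match(NEW, "ml.p6-b200.48xlarge")[1])  # p6-b200
# Non-hyphenated types parse exactly as before.
print(re.match(NEW, "ml.p3.16xlarge")[1])       # p3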
src/sagemaker/fw_utils.py (4 additions, 4 deletions)

@@ -962,7 +962,7 @@ def validate_distribution_for_instance_type(instance_type, distribution):
     """
     err_msg = ""
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
         if match and match[1].startswith("trn"):
             keys = list(distribution.keys())
             if len(keys) == 0:
@@ -1083,7 +1083,7 @@ def _is_gpu_instance(instance_type):
         bool: Whether or not the instance_type supports GPU
     """
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
         if match:
             if match[1].startswith("p") or match[1].startswith("g"):
                 return True
@@ -1102,7 +1102,7 @@ def _is_trainium_instance(instance_type):
         bool: Whether or not the instance_type is a Trainium instance
     """
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
         if match and match[1].startswith("trn"):
             return True
     return False
@@ -1149,7 +1149,7 @@ def _instance_type_supports_profiler(instance_type):
         bool: Whether or not the region supports Amazon SageMaker Debugger profiling feature.
     """
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
        if match and match[1].startswith("trn"):
            return True
    return False
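Each of the fw_utils helpers touched here only branches on a prefix of the captured family, so the behavioral change is that hyphenated families are now classified at all rather than falling through as unmatched. A hedged sketch of that dispatch logic, mirroring the checks shown in the diff rather than importing the SDK helpers:

import re

_FAMILY_RE = re.compile(r"^ml[\._]([a-z\d\-]+)\.?\w*$")

def classify(instance_type: str) -> str:
    """Illustrative mirror of the prefix checks in the fw_utils helpers above."""
    match = _FAMILY_RE.match(instance_type)
    if not match:
        return "unrecognized"
    family = match[1]
    if family.startswith("trn"):
        return "trainium"  # _is_trainium_instance / _instance_type_supports_profiler path
    if family.startswith("p") or family.startswith("g"):
        return "gpu"  # _is_gpu_instance path
    return "other"

print(classify("ml.p6-b200.48xlarge"))  # gpu (no match at all under the old pattern)
print(classify("ml.trn1-n.2xlarge"))    # trainium (no match under the old pattern)
print(classify("ml.c7gd.4xlarge"))      # other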
src/sagemaker/serve/utils/optimize_utils.py (1 addition, 1 deletion)

@@ -38,7 +38,7 @@ def _is_inferentia_or_trainium(instance_type: Optional[str]) -> bool:
         bool: Whether the given instance type is Inferentia or Trainium.
     """
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
         if match:
             if match[1].startswith("inf") or match[1].startswith("trn"):
                 return True
src/sagemaker/utils.py (1 addition, 1 deletion)

@@ -1529,7 +1529,7 @@ def get_instance_type_family(instance_type: str) -> str:
     """
     instance_type_family = ""
     if isinstance(instance_type, str):
-        match = re.match(r"^ml[\._]([a-z\d]+)\.?\w*$", instance_type)
+        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
         if match is not None:
             instance_type_family = match[1]
     return instance_type_family
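get_instance_type_family simply returns the captured group, or an empty string when nothing matches, so the new expectation in tests/unit/test_utils.py ("ml.p6-b200.48xlarge" maps to "p6-b200") follows directly. A quick illustrative reimplementation, not the SDK function itself, checked against values taken from that test table:

import re

def instance_type_family(instance_type) -> str:
    """Same shape as get_instance_type_family in the diff: the captured family, else ""."""
    if isinstance(instance_type, str):
        match = re.match(r"^ml[\._]([a-z\d\-]+)\.?\w*$", instance_type)
        if match is not None:
            return match[1]
    return ""

assert instance_type_family("ml.p6-b200.48xlarge") == "p6-b200"
assert instance_type_family("ml_c2.4xlarge") == "c2"
assert instance_type_family("c2.xlarge") == ""  # no ml prefix, so no family is extracted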
tests/unit/sagemaker/serve/utils/test_optimize_utils.py (2 additions)

@@ -95,6 +95,8 @@
     [
         ("ml.trn1.2xlarge", True),
         ("ml.inf2.xlarge", True),
+        ("ml.trn1-n.2xlarge", True),
+        ("ml.inf2-b.xlarge", True),
         ("ml.c7gd.4xlarge", False),
     ],
 )
tests/unit/test_estimator.py (15 additions)

@@ -2246,6 +2246,21 @@ def test_get_instance_type_gpu(sagemaker_session):
     assert "ml.p3.16xlarge" == estimator._get_instance_type()


+def test_get_instance_type_gpu_with_hyphens(sagemaker_session):
+    estimator = Estimator(
+        image_uri="some-image",
+        role="some_image",
+        instance_groups=[
+            InstanceGroup("group1", "ml.c4.xlarge", 1),
+            InstanceGroup("group2", "ml.p6-b200.48xlarge", 2),
+        ],
+        sagemaker_session=sagemaker_session,
+        base_job_name="base_job_name",
+    )
+
+    assert "ml.p6-b200.48xlarge" == estimator._get_instance_type()
+
+
 def test_estimator_with_output_compression_disabled(sagemaker_session):
     estimator = Estimator(
         image_uri="some-image",
tests/unit/test_fw_utils.py (11 additions)

@@ -1065,6 +1065,13 @@ def test_validate_unsupported_distributions_trainium_raises():
             instance_type="ml.trn1.32xlarge",
         )

+    with pytest.raises(ValueError):
+        mpi_enabled = {"mpi": {"enabled": True}}
+        fw_utils.validate_distribution_for_instance_type(
+            distribution=mpi_enabled,
+            instance_type="ml.trn1-n.2xlarge",
+        )
+
     with pytest.raises(ValueError):
         pytorch_ddp_enabled = {"pytorch_ddp": {"enabled": True}}
         fw_utils.validate_distribution_for_instance_type(
@@ -1082,6 +1089,7 @@ def test_validate_unsupported_distributions_trainium_raises():

 def test_instance_type_supports_profiler():
     assert fw_utils._instance_type_supports_profiler("ml.trn1.xlarge") is True
+    assert fw_utils._instance_type_supports_profiler("ml.trn1-n.xlarge") is True
     assert fw_utils._instance_type_supports_profiler("ml.m4.xlarge") is False
     assert fw_utils._instance_type_supports_profiler("local") is False

@@ -1097,6 +1105,8 @@ def test_is_gpu_instance():
         "ml.g4dn.xlarge",
         "ml.g5.xlarge",
         "ml.g5.48xlarge",
+        "ml.p6-b200.48xlarge",
+        "ml.g6e-12xlarge.xlarge",
         "local_gpu",
     ]
     non_gpu_instance_types = [
@@ -1116,6 +1126,7 @@ def test_is_trainium_instance():
     trainium_instance_types = [
         "ml.trn1.2xlarge",
         "ml.trn1.32xlarge",
+        "ml.trn1-n.2xlarge",
     ]
     non_trainum_instance_types = [
         "ml.t3.xlarge",
tests/unit/test_utils.py (1 addition)

@@ -1844,6 +1844,7 @@ def test_instance_family_from_full_instance_type(self):
             "ml.afbsadjfbasfb.sdkjfnsa": "afbsadjfbasfb",
             "ml_fdsfsdf.xlarge": "fdsfsdf",
             "ml_c2.4xlarge": "c2",
+            "ml.p6-b200.48xlarge": "p6-b200",
             "sdfasfdda": "",
             "local": "",
             "c2.xlarge": "",