Skip to content

Remove inputs and container artifacts for local mode trainer #4978

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Jan 13, 2025
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions src/sagemaker/modules/local_core/local_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,8 @@ class _LocalContainer(BaseModel):
container_entrypoint: Optional[List[str]]
container_arguments: Optional[List[str]]

_temperary_folders: List[str] = []
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nit: temporary_folders


def model_post_init(self, __context: Any):
"""Post init method to perform custom validation and set default values."""
self.hosts = [f"algo-{i}" for i in range(1, self.instance_count + 1)]
Expand Down Expand Up @@ -146,12 +148,15 @@ def model_post_init(self, __context: Any):
def train(
self,
wait: bool,
remove_inputs_and_container_artifacts: Optional[bool] = True,
) -> str:
"""Run a training job locally using docker-compose.

Args:
wait (bool):
Whether to wait the training output before exiting.
remove_inputs_and_container_artifacts (Optional[bool]):
Whether to remove inputs and container artifacts after training.
"""
# create output/data folder since sagemaker-containers 2.0 expects it
os.makedirs(os.path.join(self.container_root, "output", "data"), exist_ok=True)
Expand Down Expand Up @@ -201,6 +206,14 @@ def train(

# Print our Job Complete line
logger.info("Local training job completed, output artifacts saved to %s", artifacts)

if remove_inputs_and_container_artifacts:
shutil.rmtree(os.path.join(self.container_root, "input"))
shutil.rmtree(os.path.join(self.container_root, "shared"))
for host in self.hosts:
shutil.rmtree(os.path.join(self.container_root, host))
for folder in self._temperary_folders:
shutil.rmtree(os.path.join(self.container_root, folder))
return artifacts

def retrieve_artifacts(
Expand Down Expand Up @@ -540,6 +553,7 @@ def _get_data_source_local_path(self, data_source: DataSource):
uri = data_source.s3_data_source.s3_uri
parsed_uri = urlparse(uri)
local_dir = TemporaryDirectory(prefix=os.path.join(self.container_root + "/")).name
self._temperary_folders.append(local_dir)
download_folder(parsed_uri.netloc, parsed_uri.path, local_dir, self.sagemaker_session)
return local_dir
else:
Expand Down
5 changes: 4 additions & 1 deletion src/sagemaker/modules/train/model_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ class ModelTrainer(BaseModel):
local_container_root (Optional[str]):
The local root directory to store artifacts from a training job launched in
"LOCAL_CONTAINER" mode.
remove_inputs_and_container_artifacts (Optional[bool]):
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This option seems to be specifically for local mode? Can we not just do the cleanup for local mode by default?
We should be very careful in expanding the input param list for ModelTrainer (we shouldn't unless User experience warrants it).

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

+1

Is there any advantage we provide by keeping the artifacts stored?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Users may need to inspect the environment variables and hyperparameters inside each container for debugging, so keeping the artifacts can be useful.

Whether to remove inputs and container artifacts after training.
"""

model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid")
Expand All @@ -227,6 +229,7 @@ class ModelTrainer(BaseModel):
hyperparameters: Optional[Dict[str, Any]] = {}
tags: Optional[List[Tag]] = None
local_container_root: Optional[str] = os.getcwd()
remove_inputs_and_container_artifacts: Optional[bool] = True

# Created Artifacts
_latest_training_job: Optional[resources.TrainingJob] = PrivateAttr(default=None)
Expand Down Expand Up @@ -646,7 +649,7 @@ def train(
hyper_parameters=string_hyper_parameters,
environment=self.environment,
)
local_container.train(wait)
local_container.train(wait, self.remove_inputs_and_container_artifacts)

def create_input_data_channel(
self, channel_name: str, data_source: DataSourceType, key_prefix: Optional[str] = None
Expand Down
133 changes: 128 additions & 5 deletions tests/integ/sagemaker/modules/train/test_local_model_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,18 +92,78 @@ def test_single_container_local_mode_local_data(modules_sagemaker_session):
"compressed_artifacts",
"artifacts",
"model",
"shared",
"input",
"output",
"algo-1",
]

for directory in directories:
path = os.path.join(CWD, directory)
delete_local_path(path)


def test_single_container_local_mode_s3_data(modules_sagemaker_session):
def test_single_container_local_mode_s3_data_remove_input(modules_sagemaker_session):
    """Single-container LOCAL_CONTAINER run on S3 data; default cleanup removes inputs."""
    with lock.lock(LOCK_PATH):
        try:
            # Stage the local fixture data in the session's default S3 bucket.
            session = modules_sagemaker_session
            bucket = session.default_bucket()
            for channel in ("train", "test"):
                session.upload_data(
                    path=os.path.join(SOURCE_DIR, f"data/{channel}/"),
                    bucket=bucket,
                    key_prefix=f"data/{channel}",
                )

            trainer = ModelTrainer(
                training_image=DEFAULT_CPU_IMAGE,
                sagemaker_session=modules_sagemaker_session,
                source_code=SourceCode(
                    source_dir=SOURCE_DIR,
                    entry_script="local_training_script.py",
                ),
                compute=Compute(
                    instance_type="local_cpu",
                    instance_count=1,
                ),
                # Channels read directly from S3 rather than from the local disk.
                input_data_config=[
                    InputData(channel_name="train", data_source=f"s3://{bucket}/data/train/"),
                    InputData(channel_name="test", data_source=f"s3://{bucket}/data/test/"),
                ],
                base_job_name="local_mode_single_container_s3_data",
                training_mode=Mode.LOCAL_CONTAINER,
            )

            trainer.train()
            assert os.path.exists(os.path.join(CWD, "compressed_artifacts/model.tar.gz"))
        finally:
            subprocess.run(["docker", "compose", "down", "-v"])

            # Default behavior: input and per-host container folders are removed after training.
            for removed_dir in ("shared", "input", "algo-1"):
                assert not os.path.exists(os.path.join(CWD, removed_dir))

            # Remove the output folders the run leaves behind.
            for leftover in ("compressed_artifacts", "artifacts", "model", "output"):
                delete_local_path(os.path.join(CWD, leftover))


def test_single_container_local_mode_s3_data_not_remove_input(modules_sagemaker_session):
with lock.lock(LOCK_PATH):
try:
# upload local data to s3
Expand Down Expand Up @@ -143,6 +203,7 @@ def test_single_container_local_mode_s3_data(modules_sagemaker_session):
input_data_config=[train_data, test_data],
base_job_name="local_mode_single_container_s3_data",
training_mode=Mode.LOCAL_CONTAINER,
remove_inputs_and_container_artifacts=False,
)

model_trainer.train()
Expand All @@ -164,7 +225,68 @@ def test_single_container_local_mode_s3_data(modules_sagemaker_session):
delete_local_path(path)


def test_multi_container_local_mode(modules_sagemaker_session):
def test_multi_container_local_mode_remove_input(modules_sagemaker_session):
    """Two-container torchrun LOCAL_CONTAINER run; default cleanup removes inputs."""
    with lock.lock(LOCK_PATH):
        try:
            trainer = ModelTrainer(
                training_image=DEFAULT_CPU_IMAGE,
                sagemaker_session=modules_sagemaker_session,
                source_code=SourceCode(
                    source_dir=SOURCE_DIR,
                    entry_script="local_training_script.py",
                ),
                distributed=Torchrun(
                    process_count_per_node=1,
                ),
                # Two local instances exercise the multi-container (algo-1/algo-2) path.
                compute=Compute(
                    instance_type="local_cpu",
                    instance_count=2,
                ),
                input_data_config=[
                    InputData(
                        channel_name="train",
                        data_source=os.path.join(SOURCE_DIR, "data/train/"),
                    ),
                    InputData(
                        channel_name="test",
                        data_source=os.path.join(SOURCE_DIR, "data/test/"),
                    ),
                ],
                base_job_name="local_mode_multi_container",
                training_mode=Mode.LOCAL_CONTAINER,
            )

            trainer.train()
            assert os.path.exists(os.path.join(CWD, "compressed_artifacts/model.tar.gz"))

        finally:
            subprocess.run(["docker", "compose", "down", "-v"])

            # Default behavior: input and both per-host container folders are removed.
            for removed_dir in ("shared", "input", "algo-1", "algo-2"):
                assert not os.path.exists(os.path.join(CWD, removed_dir))

            # Remove the output folders the run leaves behind.
            for leftover in ("compressed_artifacts", "artifacts", "model", "output"):
                delete_local_path(os.path.join(CWD, leftover))


def test_multi_container_local_mode_not_remove_input(modules_sagemaker_session):
with lock.lock(LOCK_PATH):
try:
source_code = SourceCode(
Expand Down Expand Up @@ -200,6 +322,7 @@ def test_multi_container_local_mode(modules_sagemaker_session):
input_data_config=[train_data, test_data],
base_job_name="local_mode_multi_container",
training_mode=Mode.LOCAL_CONTAINER,
remove_inputs_and_container_artifacts=False,
)

model_trainer.train()
Expand Down
Loading