
Commit f151383

Author: Tensorflow Cloud maintainers (committed)
Merge pull request #218 from christianversloot:master
PiperOrigin-RevId: 345099633
2 parents 737b7c5 + 7d396a7

File tree: 3 files changed (+47, -17 lines)

src/python/tensorflow_cloud/core/containerize.py
src/python/tensorflow_cloud/core/preprocess.py
src/python/tensorflow_cloud/core/run.py

src/python/tensorflow_cloud/core/containerize.py

Lines changed: 21 additions & 4 deletions

@@ -103,6 +103,8 @@ def __init__(
         # Those will be populated lazily.
         self.tar_file_path = None
         self.docker_client = None
+        self.tar_file_descriptor = None
+        self.docker_file_descriptor = None

     def get_docker_image(
         self, max_status_check_attempts=None, delay_between_status_checks=None
@@ -117,15 +119,30 @@ def get_docker_image(
         """
         raise NotImplementedError

-    def get_generated_files(self):
-        return [self.docker_file_path, self.tar_file_path]
+    def get_generated_files(self, return_descriptors=False):
+        """Get generated file paths and/or descriptors for generated files.
+
+        Args:
+            return_descriptors: Whether to return descriptors as well.
+
+        Returns:
+            Docker and tar file paths. Depending on return_descriptors, possibly
+            their file descriptors as well.
+        """
+        if return_descriptors:
+            return [
+                (self.docker_file_path, self.docker_file_descriptor),
+                (self.tar_file_path, self.tar_file_descriptor)
+            ]
+        else:
+            return [self.docker_file_path, self.tar_file_path]

     def _get_tar_file_path(self):
         """Packages files into a tarball."""
         self._create_docker_file()
         file_path_map = self._get_file_path_map()

-        _, self.tar_file_path = tempfile.mkstemp()
+        self.tar_file_descriptor, self.tar_file_path = tempfile.mkstemp()
         with tarfile.open(self.tar_file_path, "w:gz", dereference=True) as tar:
             for source, destination in file_path_map.items():
                 tar.add(source, arcname=destination)
@@ -233,7 +250,7 @@ def _create_docker_file(self):
         )

         content = "\n".join(lines)
-        _, self.docker_file_path = tempfile.mkstemp()
+        self.docker_file_descriptor, self.docker_file_path = tempfile.mkstemp()
         with open(self.docker_file_path, "w") as f:
             f.write(content)
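
The change above keeps the OS-level file descriptors that tempfile.mkstemp() returns instead of discarding them with `_, path = tempfile.mkstemp()`. A minimal standalone sketch (not repository code) of why that matters: the descriptor returned by mkstemp() is already open and stays open until it is explicitly closed, independently of any handle opened later on the same path.

    import os
    import tempfile

    # mkstemp() returns (fd, path); the descriptor is already open at the OS level.
    fd, path = tempfile.mkstemp()
    try:
        with open(path, "w") as f:  # second, independent handle; closed by the `with`
            f.write("temporary contents")
    finally:
        os.close(fd)     # close the descriptor returned by mkstemp()
        os.remove(path)  # then delete the temporary file itself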

src/python/tensorflow_cloud/core/preprocess.py

Lines changed: 11 additions & 3 deletions

@@ -47,6 +47,7 @@ def get_preprocessed_entry_point(
     worker_count,
     distribution_strategy,
     called_from_notebook=False,
+    return_file_descriptor=False
 ):
     """Creates python script for distribution based on the given `entry_point`.

@@ -107,13 +108,15 @@ def get_preprocessed_entry_point(
             `worker_count` params.
         called_from_notebook: Boolean. True if the API is run in a
             notebook environment.
+        return_file_descriptor: Boolean. True if the file descriptor for the
+            temporary file is also returned.

     Returns:
         The `preprocessed_entry_point` file path.

     Raises:
         RuntimeError: If invoked from Notebook but unable to access it.
-            Typically, this is due to missing the `nbconvert` package.
+            Typically, this is due to missing the `nbconvert` package.
     """

     # Set `TF_KERAS_RUNNING_REMOTELY` env variable. This is required in order
@@ -198,10 +201,15 @@ def get_preprocessed_entry_point(
         script_lines.append(line)

     # Create a tmp wrapped entry point script file.
-    _, output_file = tempfile.mkstemp(suffix=".py")
+    file_descriptor, output_file = tempfile.mkstemp(suffix=".py")
     with open(output_file, "w") as f:
         f.writelines(script_lines)
-    return output_file
+
+    # Returning file descriptor could be necessary for some os.close calls
+    if return_file_descriptor:
+        return (output_file, file_descriptor)
+    else:
+        return output_file


 def _get_colab_notebook_content():
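
A hedged sketch of the new return contract (a standalone stand-in, not the repository function, which also takes the config and distribution-strategy arguments shown in the diff): with return_file_descriptor=True the caller receives a (path, descriptor) pair and is responsible for closing the descriptor before removing the file.

    import os
    import tempfile

    def write_wrapped_script(script_lines, return_file_descriptor=False):
        # Simplified stand-in mirroring the contract above: path only by default,
        # (path, file_descriptor) when return_file_descriptor=True.
        file_descriptor, output_file = tempfile.mkstemp(suffix=".py")
        with open(output_file, "w") as f:
            f.writelines(script_lines)
        if return_file_descriptor:
            return (output_file, file_descriptor)
        return output_file

    path, fd = write_wrapped_script(["print('hello')\n"], return_file_descriptor=True)
    os.close(fd)     # release the descriptor opened by mkstemp()
    os.remove(path)  # then delete the generated script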

src/python/tensorflow_cloud/core/run.py

Lines changed: 15 additions & 10 deletions

@@ -194,14 +194,16 @@ def run(
     if (distribution_strategy == "auto"
             or entry_point.endswith("ipynb")
             or entry_point is None):
-        preprocessed_entry_point = preprocess.get_preprocessed_entry_point(
-            entry_point,
-            chief_config,
-            worker_config,
-            worker_count,
-            distribution_strategy,
-            called_from_notebook=called_from_notebook,
-        )
+        preprocessed_entry_point, \
+            pep_file_descriptor = preprocess.get_preprocessed_entry_point(
+                entry_point,
+                chief_config,
+                worker_config,
+                worker_count,
+                distribution_strategy,
+                called_from_notebook=called_from_notebook,
+                return_file_descriptor=True,
+            )

     # Create Docker file, generate a tarball, build and push Docker
     # image using the tarball.
@@ -227,9 +229,12 @@ def run(

     # Delete all the temporary files we created.
     if preprocessed_entry_point is not None:
+        os.close(pep_file_descriptor)
         os.remove(preprocessed_entry_point)
-    for f in container_builder.get_generated_files():
-        os.remove(f)
+    for file_path, file_descriptor \
+            in container_builder.get_generated_files(return_descriptors=True):
+        os.close(file_descriptor)
+        os.remove(file_path)

     # Deploy Docker image on the cloud.
     job_id = deploy.deploy_job(
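
The cleanup in run() now pairs each temporary file path with its descriptor and closes the descriptor before removing the file. A minimal sketch of that pattern, using a stand-in builder rather than the real ContainerBuilder and assuming only the get_generated_files(return_descriptors=True) shape introduced above:

    import os
    import tempfile

    class FakeBuilder:
        # Stand-in (not repository code) exposing the same return shape as
        # get_generated_files(return_descriptors=True).
        def __init__(self):
            self.docker_file_descriptor, self.docker_file_path = tempfile.mkstemp()
            self.tar_file_descriptor, self.tar_file_path = tempfile.mkstemp(suffix=".tar.gz")

        def get_generated_files(self, return_descriptors=False):
            if return_descriptors:
                return [(self.docker_file_path, self.docker_file_descriptor),
                        (self.tar_file_path, self.tar_file_descriptor)]
            return [self.docker_file_path, self.tar_file_path]

    builder = FakeBuilder()
    # Mirrors the updated loop in run(): close each descriptor, then remove the file.
    for file_path, file_descriptor in builder.get_generated_files(return_descriptors=True):
        os.close(file_descriptor)
        os.remove(file_path)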

0 commit comments
