
Commit ac5f28a

SinaChavoshi authored and Tensorflow Cloud maintainers committed
Update examples for Colab Sandbox environment.
PiperOrigin-RevId: 357310307
1 parent a2e5e98 commit ac5f28a

4 files changed: +143 −106 lines

examples/distributed_training_nasnet_with_tensorflow_cloud.ipynb

Lines changed: 13 additions & 6 deletions
@@ -41,7 +41,15 @@
  "import os\n",
  "import sys\n",
  "import tensorflow as tf\n",
- "import tensorflow_cloud as tfc"
+ "import subprocess\n",
+ "\n",
+ "# Install latest version of tensorflow_cloud\n",
+ "if os.environ.get(\"TF_KERAS_RUNNING_REMOTELY\", True):\n",
+ " subprocess.run(\n",
+ " ['python3', '-m', 'pip', 'install', 'tensorflow-cloud', '-q'])\n",
+ "\n",
+ "import tensorflow_cloud as tfc\n",
+ "print(tfc.__version__)"
  ]
  },
  {
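Unescaped from the notebook JSON above, the new install-and-import cell amounts to roughly the following plain Python (a readability sketch only; the TF_KERAS_RUNNING_REMOTELY variable and package name come from the diff, the comments do not):

import os
import subprocess

# Install tensorflow-cloud before importing it. os.environ.get() returns the
# variable's string value when it is set (non-empty strings are truthy) and
# the default True otherwise, so the pip install runs both locally and inside
# the remote job.
if os.environ.get("TF_KERAS_RUNNING_REMOTELY", True):
    subprocess.run(
        ["python3", "-m", "pip", "install", "tensorflow-cloud", "-q"])

import tensorflow_cloud as tfc
print(tfc.__version__)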
@@ -113,7 +121,8 @@
  " # Authentication for Colab Notebooks\n",
  " if \"google.colab\" in sys.modules:\n",
  " from google.colab import auth\n",
- " auth.authenticate_user()"
+ " auth.authenticate_user()\n",
+ " os.environ[\"GOOGLE_CLOUD_PROJECT\"] = GCP_PROJECT_ID"
  ]
  },
  {
@@ -335,10 +344,8 @@
  },
  "outputs": [],
  "source": [
- "if not tfc.remote():\n",
- "\n",
- " %load_ext tensorboard\n",
- " %tensorboard --logdir TENSORBOARD_LOGS_DIR"
+ "# %load_ext tensorboard\n",
+ "# %tensorboard --logdir TENSORBOARD_LOGS_DIR"
  ]
  }
  ],

examples/google_cloud_project_setup_instructions.ipynb

Lines changed: 85 additions & 40 deletions
@@ -41,6 +41,17 @@
  "After completing the sign up process you will be redirected to [Google Cloud Platform welcome page](https://console.cloud.google.com/home/dashboard). click on the \"Home\" tab and make a note of your Project ID."
  ]
  },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "WPew3GoCcdtm"
+ },
+ "outputs": [],
+ "source": [
+ "GCP_PROJECT_ID = 'YOUR_PROJECT_ID'"
+ ]
+ },
  {
  "cell_type": "markdown",
  "metadata": {
@@ -75,8 +86,6 @@
  },
  "outputs": [],
  "source": [
- "GCP_PROJECT_ID = 'YOUR_PROJECT_ID'\n",
- "\n",
  "import sys\n",
  "if \"kaggle_secrets\" in sys.modules:\n",
  " from kaggle_secrets import UserSecretsClient\n",
@@ -122,32 +131,12 @@
  "cell_type": "code",
  "execution_count": null,
  "metadata": {
- "executionInfo": {
- "elapsed": 320,
- "status": "ok",
- "timestamp": 1612139931246,
- "user": {
- "displayName": "",
- "photoUrl": "",
- "userId": ""
- },
- "user_tz": 480
- },
  "id": "T8FCFUaQ-E_E",
- "outputId": "66697de7-b2c0-4257-cd6f-33126aadbe0b",
  "trusted": true
  },
- "outputs": [
- {
- "name": "stdout",
- "output_type": "stream",
- "text": [
- "/bin/sh: line 1: gcloud: command not found\n"
- ]
- }
- ],
+ "outputs": [],
  "source": [
- "!gcloud iam service-accounts create YOUR_SERVICE_ACCOUNT_NAME\n"
+ "!gcloud beta billing accounts list"
  ]
  },
  {
@@ -168,7 +157,8 @@
  },
  "outputs": [],
  "source": [
- "!gcloud beta billing projects link 'YOUR_PROJECT_ID' --billing-account 'YOUR_BILLING_ACCOUNT_ID'"
+ "BILLING_ACCOUNT_ID = 'YOUR_BILLING_ACCOUNT_ID'\n",
+ "!gcloud beta billing projects link $GCP_PROJECT_ID --billing-account $BILLING_ACCOUNT_ID"
  ]
  },
  {
@@ -190,7 +180,7 @@
  },
  "outputs": [],
  "source": [
- "!gcloud services enable ml.googleapis.com cloudbuild.googleapis.com"
+ "!gcloud services --project $GCP_PROJECT_ID enable ml.googleapis.com cloudbuild.googleapis.com"
  ]
  },
  {
@@ -213,7 +203,9 @@
  },
  "outputs": [],
  "source": [
- "!gsutil mb gs://YOUR_BUCKET_NAME"
+ "BUCKET_NAME = 'YOUR_BUCKET_NAME'\n",
+ "GCS_BUCKET = f'gs://{BUCKET_NAME}'\n",
+ "!gsutil mb -p $GCP_PROJECT_ID $GCS_BUCKET"
  ]
  },
  {
@@ -224,7 +216,7 @@
  "source": [
  "## Create a service account for HP Tuning jobs\n",
  "This step is required to use HP Tuning on Google Cloud using CloudTuner.\n",
- "To [create a service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts#iam-service-accounts-create-gcloud) run the following command to create a service account and make a note of it."
+ "To [create a service account](https://cloud.google.com/iam/docs/creating-managing-service-accounts#iam-service-accounts-create-gcloud) run the following command and make a note of your service account name."
  ]
  },
  {
@@ -236,30 +228,74 @@
  },
  "outputs": [],
  "source": [
- "!gcloud iam service-accounts create YOUR_SERVICE_ACCOUNT_NAME"
+ "SERVICE_ACCOUNT_NAME ='YOUR_SERVICE_ACCOUNT_NAME'\n",
+ "SERVICE_ACCOUNT_EMAIL = f'{SERVICE_ACCOUNT_NAME}@{GCP_PROJECT_ID}.iam.gserviceaccount.com'\n",
+ "\n",
+ "!gcloud iam --project $GCP_PROJECT_ID service-accounts create $SERVICE_ACCOUNT_NAME"
  ]
  },
  {
  "cell_type": "markdown",
  "metadata": {
- "id": "LdUZHaER-E_H"
+ "id": "a-fNtK6rvGmg"
  },
  "source": [
- "Give permission to your service account to access Google Cloud Services by running the following command"
+ "The [`default AI Platform service account`](https://cloud.google.com/ai-platform/training/docs/custom-service-account#default) is identified by an email address with the format `service-PROJECT_NUMBER@cloud-ml.google.com.iam.gserviceaccount.com`. Run the following command to get your PROJECT_NUMBER."
  ]
  },
  {
  "cell_type": "code",
  "execution_count": null,
  "metadata": {
- "id": "kWZvvlnb-E_H",
- "trusted": true
+ "id": "4MZGiPZnysMo"
  },
  "outputs": [],
  "source": [
- "!gcloud projects add-iam-policy-binding YOUR_PROJECT_ID \\\n",
- " --member serviceAccount:YOUR_SERVICE_ACCOUNT_NAME@YOUR_PROJECT_ID.iam.gserviceaccount.com \\\n",
- " --role 'roles/editor'"
+ "!gcloud projects describe $GCP_PROJECT_ID |grep projectNumber"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "hfS6Erynz9Tx"
+ },
+ "source": [
+ "Use the project number above to construct the service account email."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "AYPnCTq_z7p4"
+ },
+ "outputs": [],
+ "source": [
+ "PROJECT_NUMBER = 'YOUR_PROJECT_NUMBER'\n",
+ "DEFAULT_AI_PLATFORM_SERVICE_ACCOUNT = f'service-{PROJECT_NUMBER}@cloud-ml.google.com.iam.gserviceaccount.com'"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "ySCk0NIF3lux"
+ },
+ "source": [
+ "Grant the [`default AI Platform service account`](https://cloud.google.com/ai-platform/training/docs/custom-service-account#default) admin role (roles/iam.serviceAccountAdmin) on your new service account."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "id": "l9HL0bYxuzWL"
+ },
+ "outputs": [],
+ "source": [
+ "!gcloud iam --project $GCP_PROJECT_ID service-accounts add-iam-policy-binding \\\n",
+ "--role=roles/iam.serviceAccountAdmin \\\n",
+ "--member=serviceAccount:$DEFAULT_AI_PLATFORM_SERVICE_ACCOUNT \\\n",
+ "$SERVICE_ACCOUNT_EMAIL"
  ]
  },
  {
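Unescaped from the notebook JSON, the identifiers the new cells build up look roughly like this (a readability sketch; the YOUR_* placeholders are the notebook's own stand-ins, and the print calls are illustrative):

# Placeholders copied from the notebook; replace with real values.
GCP_PROJECT_ID = "YOUR_PROJECT_ID"
SERVICE_ACCOUNT_NAME = "YOUR_SERVICE_ACCOUNT_NAME"
PROJECT_NUMBER = "YOUR_PROJECT_NUMBER"  # printed by `gcloud projects describe`

# Service account created for HP Tuning jobs.
SERVICE_ACCOUNT_EMAIL = (
    f"{SERVICE_ACCOUNT_NAME}@{GCP_PROJECT_ID}.iam.gserviceaccount.com")

# Default AI Platform service account, which is granted
# roles/iam.serviceAccountAdmin on the new service account.
DEFAULT_AI_PLATFORM_SERVICE_ACCOUNT = (
    f"service-{PROJECT_NUMBER}@cloud-ml.google.com.iam.gserviceaccount.com")

print(SERVICE_ACCOUNT_EMAIL)
print(DEFAULT_AI_PLATFORM_SERVICE_ACCOUNT)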
@@ -271,7 +307,7 @@
  },
  "outputs": [],
  "source": [
- "Finally run the following to allow your users account to impersonate the service account,"
+ "Finally run the following to allow your user account to impersonate the service account."
  ]
  },
  {
@@ -283,9 +319,9 @@
  },
  "outputs": [],
  "source": [
- "!gcloud iam service-accounts add-iam-policy-binding \\\n",
- " YOUR_SERVICE_ACCOUNT_NAME@YOUR_PROJECT_ID.iam.gserviceaccount.com \\\n",
- " --member=\"user:YOUR_EMAIL_ADDRESS\" \\\n",
+ "!gcloud iam service-accounts --project $GCP_PROJECT_ID add-iam-policy-binding \\\n",
+ " $SERVICE_ACCOUNT_EMAIL \\\n",
+ " --member=\"user:YOUR_EMAIL_ADDRESS@gmail.com\" \\\n",
  " --role=\"roles/iam.serviceAccountUser\""
  ]
  },
@@ -302,8 +338,17 @@
  ],
  "metadata": {
  "colab": {
+ "collapsed_sections": [],
+ "last_runtime": {
+ "build_target": "//cloud/ml/research/learning/colab_runtime:cair_colab",
+ "kind": "private"
+ },
  "name": "google-cloud-project-setup-instructions.ipynb",
  "provenance": [
+ {
+ "file_id": "/piper/depot/google3/third_party/tensorflow_cloud/examples/google_cloud_project_setup_instructions.ipynb?workspaceId=chavoshi:tensorflow_cloud::citc",
+ "timestamp": 1613079100054
+ },
  {
  "file_id": "/piper/depot/google3/third_party/tensorflow_cloud/examples/google_cloud_project_setup_instructions.ipynb?workspaceId=chavoshi:Sample_notebooks::citc",
  "timestamp": 1612304443485
examples/hp_tuning_cifar10_using_google_cloud.ipynb

Lines changed: 24 additions & 37 deletions
@@ -44,28 +44,21 @@
  "\n",
  "import numpy as np\n",
  "import pandas as pd\n",
- "import tensorflow_cloud as tfc\n",
  "import tensorflow as tf\n",
  "import os\n",
  "import sys\n",
+ "import subprocess\n",
  "\n",
  "from tensorflow.keras import datasets, layers, models\n",
- "from sklearn.model_selection import train_test_split"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {
- "id": "TzxvWrVJyu0q",
- "trusted": true
- },
- "outputs": [],
- "source": [
- "if not tfc.remote():\n",
- " print(tfc.__version__)\n",
- " if tfc.__version__ \u003c '0.1.12':\n",
- " raise RuntimeError(\"This example requires tensorflow_cloud version 0.1.12 or newer!\")"
+ "from sklearn.model_selection import train_test_split\n",
+ "\n",
+ "# Install latest version of tensorflow_cloud\n",
+ "if os.environ.get(\"TF_KERAS_RUNNING_REMOTELY\", True):\n",
+ " subprocess.run(\n",
+ " ['python3', '-m', 'pip', 'install', 'tensorflow-cloud', '-q'])\n",
+ "\n",
+ "import tensorflow_cloud as tfc\n",
+ "print(tfc.__version__)"
  ]
  },
  {
@@ -164,7 +157,8 @@
  " # Authentication for Colab Notebooks\n",
  " if \"google.colab\" in sys.modules:\n",
  " from google.colab import auth\n",
- " auth.authenticate_user()"
+ " auth.authenticate_user()\n",
+ " os.environ[\"GOOGLE_CLOUD_PROJECT\"] = GCP_PROJECT_ID"
  ]
  },
  {
@@ -186,18 +180,12 @@
  },
  "outputs": [],
  "source": [
- "import tensorflow_datasets as tfds\n",
+ "(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()\n",
  "\n",
- "data = tfds.load('cifar10')\n",
- "train_ds, test_ds = data['train'], data['test']\n",
- "\n",
- "def standardize_record(record):\n",
- " return tf.cast(record['image'], tf.float32) / 255., record['label']\n",
- "\n",
- "# Using a small batch size for local run\n",
- "# You can adjust the batch size for better performance in remote execution.\n",
- "train_ds = train_ds.map(standardize_record).cache().batch(64).shuffle(10000)\n",
- "test_ds = test_ds.map(standardize_record).cache().batch(64)"
+ "# Setting input-specific parameters\n",
+ "# The model expects input of dimensions (INPUT_IMG_SIZE, INPUT_IMG_SIZE, 3)\n",
+ "INPUT_IMG_SIZE = 32\n",
+ "NUM_CLASSES = 10"
  ]
  },
  {
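As plain Python, the replacement data cell loads CIFAR-10 through tf.keras instead of tensorflow_datasets (a readability sketch of the cell above; the print line is illustrative and not part of the notebook):

import tensorflow as tf

# Load CIFAR-10 as in-memory NumPy arrays.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()

# Input-specific parameters: the model expects
# (INPUT_IMG_SIZE, INPUT_IMG_SIZE, 3) images and NUM_CLASSES labels.
INPUT_IMG_SIZE = 32
NUM_CLASSES = 10

print(x_train.shape, y_train.shape)  # (50000, 32, 32, 3) (50000, 1)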
@@ -219,7 +207,7 @@
  "outputs": [],
  "source": [
  "import kerastuner\n",
- "from tf.keras import layers\n",
+ "from tensorflow.keras import layers\n",
  "\n",
  "# Configure the search space\n",
  "HPS = kerastuner.engine.hyperparameters.HyperParameters()\n",
@@ -233,7 +221,7 @@
  "HPS.Float('learning_rate', 1e-4, 1e-2, sampling='log')\n",
  "\n",
  "def build_model(hp):\n",
- " inputs = tf.keras.Input(shape=(32, 32, 3))\n",
+ " inputs = tf.keras.Input(shape=(INPUT_IMG_SIZE, INPUT_IMG_SIZE, 3))\n",
  " x = inputs\n",
  " for i in range(hp.get('conv_blocks')):\n",
  " filters = hp.get('filters_'+ str(i))\n",
@@ -250,7 +238,7 @@
  " x = layers.Dense(hp.get('hidden_size'),\n",
  " activation='relu')(x)\n",
  " x = layers.Dropout(hp.get('dropout'))(x)\n",
- " outputs = layers.Dense(10, activation='softmax')(x)\n",
+ " outputs = layers.Dense(NUM_CLASSES, activation='softmax')(x)\n",
  "\n",
  " model = tf.keras.Model(inputs, outputs)\n",
  " model.compile(\n",
@@ -317,7 +305,7 @@
  "\n",
  "# Setting to run tuning remotely, you can run tuner locally to validate it works first.\n",
  "if tfc.remote():\n",
- " tuner.search(train_ds, epochs=30, validation_data=test_ds, callbacks=callbacks)"
+ " tuner.search(x=x_train, y=y_train, epochs=30, validation_split=0.2, callbacks=callbacks)"
  ]
  },
  {
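In plain Python, the updated search call keeps the tfc.remote() guard so the full 30-epoch search only runs in the remote job (sketch; tuner and callbacks are defined earlier in the notebook):

# Search on the in-memory arrays, holding out 20% of the training data
# for validation instead of the old test_ds pipeline.
if tfc.remote():
    tuner.search(x=x_train, y=y_train, epochs=30,
                 validation_split=0.2, callbacks=callbacks)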
@@ -388,10 +376,8 @@
  },
  "outputs": [],
  "source": [
- "if not tfc.remote():\n",
- "\n",
- " %load_ext tensorboard\n",
- " %tensorboard --logdir TENSORBOARD_LOGS_DIR"
+ "# %load_ext tensorboard\n",
+ "# %tensorboard --logdir TENSORBOARD_LOGS_DIR"
  ]
  },
  {
@@ -425,6 +411,7 @@
  ],
  "metadata": {
  "colab": {
+ "collapsed_sections": [],
  "name": "hp-tuning-cifar10-using-google-cloud.ipynb",
  "provenance": [
  {