Skip to content

Commit 4f92636

Browse files
MarkDaoust authored and copybara-github committed
Fix function.ipynb
PiperOrigin-RevId: 547932510
1 parent e05e326 commit 4f92636

File tree

1 file changed

+13
-15
lines changed

1 file changed

+13
-15
lines changed

site/en/guide/function.ipynb

Lines changed: 13 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -91,8 +91,6 @@
9191
},
9292
"outputs": [],
9393
"source": [
94-
"# Update TensorFlow, as this notebook requires version 2.9 or later\n",
95-
"!pip install -q -U tensorflow>=2.9.0\n",
9694
"import tensorflow as tf"
9795
]
9896
},
@@ -362,7 +360,7 @@
362360
"source": [
363361
"#### Rules of tracing\n",
364362
"\n",
365-
"When called, a `Function` matches the call arguments to existing `ConcreteFunction`s using `tf.types.experimental.TraceType` of each argument. If a matching `ConcreteFunction` is found, the call is dispatched to it. If no match is found, a new `ConcreteFunction` is traced. \n",
363+
"When called, a `Function` matches the call arguments to existing `ConcreteFunction`s using `tf.types.experimental.TraceType` of each argument. If a matching `ConcreteFunction` is found, the call is dispatched to it. If no match is found, a new `ConcreteFunction` is traced.\n",
366364
"\n",
367365
"If multiple matches are found, the most specific signature is chosen. Matching is done by [subtyping](https://en.wikipedia.org/wiki/Subtyping), much like normal function calls in C++ or Java, for instance. For example, `TensorShape([1, 2])` is a subtype of `TensorShape([None, None])` and so a call to the tf.function with `TensorShape([1, 2])` can be dispatched to the `ConcreteFunction` produced with `TensorShape([None, None])` but if a `ConcreteFunction` with `TensorShape([1, None])` also exists then it will be prioritized since it is more specific.\n",
368366
"\n",
@@ -422,11 +420,11 @@
422420
"\n",
423421
"print(next_collatz(tf.constant([1, 2])))\n",
424422
"# You specified a 1-D tensor in the input signature, so this should fail.\n",
425-
"with assert_raises(ValueError):\n",
423+
"with assert_raises(TypeError):\n",
426424
" next_collatz(tf.constant([[1, 2], [3, 4]]))\n",
427425
"\n",
428426
"# You specified an int32 dtype in the input signature, so this should fail.\n",
429-
"with assert_raises(ValueError):\n",
427+
"with assert_raises(TypeError):\n",
430428
" next_collatz(tf.constant([1.0, 2.0]))\n"
431429
]
432430
},
@@ -560,8 +558,8 @@
560558
" flavor = tf.constant([3, 4])\n",
561559
"\n",
562560
"# As described in the above rules, a generic TraceType for `Apple` and `Mango`\n",
563-
"# is generated (and a corresponding ConcreteFunction is traced) but it fails to \n",
564-
"# match the second function call since the first pair of Apple() and Mango() \n",
561+
"# is generated (and a corresponding ConcreteFunction is traced) but it fails to\n",
562+
"# match the second function call since the first pair of Apple() and Mango()\n",
565563
"# have gone out of scope by then and been deleted.\n",
566564
"get_mixed_flavor(Apple(), Mango()) # Traces a new concrete function\n",
567565
"get_mixed_flavor(Apple(), Mango()) # Traces a new concrete function again\n",
@@ -591,7 +589,7 @@
591589
"\n",
592590
" def __eq__(self, other):\n",
593591
" return type(other) is FruitTraceType and self.fruit_type == other.fruit_type\n",
594-
" \n",
592+
"\n",
595593
" def __hash__(self):\n",
596594
" return hash(self.fruit_type)\n",
597595
"\n",
@@ -970,7 +968,7 @@
970968
"id": "JeD2U-yrbfVb"
971969
},
972970
"source": [
973-
"When wrapping Python/NumPy data in a Dataset, be mindful of `tf.data.Dataset.from_generator` versus ` tf.data.Dataset.from_tensors`. The former will keep the data in Python and fetch it via `tf.py_function` which can have performance implications, whereas the latter will bundle a copy of the data as one large `tf.constant()` node in the graph, which can have memory implications.\n",
971+
"When wrapping Python/NumPy data in a Dataset, be mindful of `tf.data.Dataset.from_generator` versus ` tf.data.Dataset.from_tensor_slices`. The former will keep the data in Python and fetch it via `tf.py_function` which can have performance implications, whereas the latter will bundle a copy of the data as one large `tf.constant()` node in the graph, which can have memory implications.\n",
974972
"\n",
975973
"Reading data from files via `TFRecordDataset`, `CsvDataset`, etc. is the most effective way to consume data, as then TensorFlow itself can manage the asynchronous loading and prefetching of data, without having to involve Python. To learn more, see the [`tf.data`: Build TensorFlow input pipelines](../../guide/data) guide."
976974
]
@@ -1608,7 +1606,7 @@
16081606
"new_model = SimpleModel()\n",
16091607
"evaluate_no_bias = tf.function(evaluate).get_concrete_function(new_model, x)\n",
16101608
"# Don't pass in `new_model`, `Function` already captured its state during tracing.\n",
1611-
"print(evaluate_no_bias(x)) "
1609+
"print(evaluate_no_bias(x))"
16121610
]
16131611
},
16141612
{
@@ -1752,7 +1750,7 @@
17521750
"source": [
17531751
"opt1 = tf.keras.optimizers.Adam(learning_rate = 1e-2)\n",
17541752
"opt2 = tf.keras.optimizers.Adam(learning_rate = 1e-3)\n",
1755-
" \n",
1753+
"\n",
17561754
"@tf.function\n",
17571755
"def train_step(w, x, y, optimizer):\n",
17581756
" with tf.GradientTape() as tape:\n",
@@ -1802,13 +1800,13 @@
18021800
"y = tf.constant([2.])\n",
18031801
"\n",
18041802
"# Make a new Function and ConcreteFunction for each optimizer.\n",
1805-
"train_step_1 = tf.function(train_step).get_concrete_function(w, x, y, opt1)\n",
1806-
"train_step_2 = tf.function(train_step).get_concrete_function(w, x, y, opt2)\n",
1803+
"train_step_1 = tf.function(train_step)\n",
1804+
"train_step_2 = tf.function(train_step)\n",
18071805
"for i in range(10):\n",
18081806
" if i % 2 == 0:\n",
1809-
" train_step_1(w, x, y) # `opt1` is not used as a parameter. \n",
1807+
" train_step_1(w, x, y, opt1)\n",
18101808
" else:\n",
1811-
" train_step_2(w, x, y) # `opt2` is not used as a parameter."
1809+
" train_step_2(w, x, y, opt2)"
18121810
]
18131811
},
18141812
{

0 commit comments

Comments (0)