diff --git a/assets/recipes_generation.en.md b/assets/recipes_generation.en.md
index 0e701e3..c0b23f8 100644
--- a/assets/recipes_generation.en.md
+++ b/assets/recipes_generation.en.md
@@ -1289,14 +1289,14 @@ _➔ output:_
 To get actual predictions from the model we need to sample from the output distribution, to get actual character indices. This distribution is defined by the logits over the character vocabulary.
 
 ```python
-print('Prediction for the 1st letter of the batch 1st sequense:')
+print('Prediction for the 1st letter of the batch 1st sequence:')
 print(example_batch_predictions[0, 0])
 ```
 
 _➔ output:_
 
 > ```text
-> Prediction for the 1st letter of the batch 1st sequense:
+> Prediction for the 1st letter of the batch 1st sequence:
 > tf.Tensor(
 > [-9.0643829e-03 -1.9503604e-03  9.3381782e-04  3.7442446e-03
 >  -2.0541784e-03 -7.4054599e-03 -7.1884273e-03  2.6014952e-03
diff --git a/assets/recipes_generation.ru.md b/assets/recipes_generation.ru.md
index 1a271a1..03af509 100644
--- a/assets/recipes_generation.ru.md
+++ b/assets/recipes_generation.ru.md
@@ -1301,14 +1301,14 @@ _➔ вывод:_
 Для того, чтобы выбрать символ, который по мнению модели должен идти следующим нам необходимо сделать sampling по вероятностям появления каждого символа.
 
 ```python
-print('Prediction for the 1st letter of the batch 1st sequense:')
+print('Prediction for the 1st letter of the batch 1st sequence:')
 print(example_batch_predictions[0, 0])
 ```
 
 _➔ вывод:_
 
 > ```text
-> Prediction for the 1st letter of the batch 1st sequense:
+> Prediction for the 1st letter of the batch 1st sequence:
 > tf.Tensor(
 > [-9.0643829e-03 -1.9503604e-03  9.3381782e-04  3.7442446e-03
 >  -2.0541784e-03 -7.4054599e-03 -7.1884273e-03  2.6014952e-03
diff --git a/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb b/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
index 554cbed..f38bea0 100644
--- a/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
+++ b/experiments/recipe_generation_rnn/recipe_generation_rnn.ipynb
@@ -2603,7 +2603,7 @@
     }
    ],
    "source": [
-    "# Let's do a quick detour and see how Embeding layer works.\n",
+    "# Let's do a quick detour and see how Embedding layer works.\n",
     "# It takes several char indices sequences (batch) as an input.\n",
     "# It encodes every character of every sequence to a vector of tmp_embeding_size length.\n",
     "tmp_vocab_size = 10\n",
@@ -2894,7 +2894,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Prediction for the 1st letter of the batch 1st sequense:\n",
+      "Prediction for the 1st letter of the batch 1st sequence:\n",
       "tf.Tensor(\n",
       "[-9.0643829e-03 -1.9503604e-03  9.3381782e-04  3.7442446e-03\n",
       " -2.0541784e-03 -7.4054599e-03 -7.1884273e-03  2.6014952e-03\n",
@@ -2944,7 +2944,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },
diff --git a/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb b/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
index 56d84bc..44139e0 100644
--- a/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
+++ b/experiments/text_generation_shakespeare_rnn/text_generation_shakespeare_rnn.ipynb
@@ -1417,7 +1417,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Prediction for the 1st letter of the batch 1st sequense:\n",
+      "Prediction for the 1st letter of the batch 1st sequence:\n",
       "tf.Tensor(\n",
       "[-5.5658985e-03 -5.6167855e-03  2.3333444e-03 -5.4010577e-03\n",
       " -1.2658490e-03 -2.0685978e-03 -1.7119508e-03 -1.9059415e-03\n",
@@ -1440,7 +1440,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },
diff --git a/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb b/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
index a10b935..71d7a0d 100644
--- a/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
+++ b/experiments/text_generation_wikipedia_rnn/text_generation_wikipedia_rnn.ipynb
@@ -1055,7 +1055,7 @@
     }
    ],
    "source": [
-    "# Map character indices to characters from vacabulary.\n",
+    "# Map character indices to characters from vocabulary.\n",
     "index2char = np.array(vocab)\n",
     "\n",
     "print(index2char)"
@@ -1365,7 +1365,7 @@
     "id": "BDYHEJ0pY1ai"
    },
    "source": [
-    "Each index of these vectors are processed as one time step. For the input at time step 0, the model receives the index for \"F\" and trys to predict the index for \"i\" as the next character. At the next timestep, it does the same thing but the RNN considers the previous step context in addition to the current input character."
+    "Each index of these vectors are processed as one time step. For the input at time step 0, the model receives the index for \"F\" and tries to predict the index for \"i\" as the next character. At the next timestep, it does the same thing but the RNN considers the previous step context in addition to the current input character."
    ]
   },
   {
@@ -1644,7 +1644,7 @@
     }
    ],
    "source": [
-    "# Let's do a quick detour and see how Embeding layer works.\n",
+    "# Let's do a quick detour and see how Embedding layer works.\n",
     "# It takes several char indices sequences (batch) as an input.\n",
     "# It encodes every character of every sequence to a vector of tmp_embeding_size length.\n",
     "tmp_vocab_size = 10\n",
@@ -1935,7 +1935,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Prediction for the 1st letter of the batch 1st sequense:\n",
+      "Prediction for the 1st letter of the batch 1st sequence:\n",
       "tf.Tensor(\n",
       "[-2.96991039e-03  2.02196068e-04  5.34047745e-03 -2.94846855e-03\n",
       " -3.64167639e-03 -2.63241702e-04 -8.80502281e-04  7.99844624e-04\n",
@@ -2097,7 +2097,7 @@
     }
    ],
    "source": [
-    "print('Prediction for the 1st letter of the batch 1st sequense:')\n",
+    "print('Prediction for the 1st letter of the batch 1st sequence:')\n",
     "print(example_batch_predictions[0, 0])"
    ]
   },