94 | 94 | "\n",
95 | 95 | "When these perceptrons are stacked, they form structures called dense layers, which can then be connected to build a neural network. A dense layer's equation is similar to that of a perceptron, but it uses a weight matrix and a bias vector instead: \n",
96 | 96 | "\n",
97 |     | - "$$Y = \\mathrm{W}⋅\\mathrm{X} + \\vec{b}$$\n",
    |  97 | + "$$Z = \\mathrm{W}⋅\\mathrm{X} + \\vec{b}$$\n",
98 | 98 | "\n",
99 | 99 | "where\n",
100 | 100 | "\n",
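
For readers following the math, the renamed pre-activation output Z maps directly onto a couple of lines of TensorFlow. A minimal sketch with made-up shapes; since the batch dimension comes first in code, the product appears as X·W rather than the W·X written in the notebook's equation:

```python
import tensorflow as tf

# Made-up sizes for illustration: a batch of 4 inputs, 3 features, 2 output units
batch_size, in_dim, out_dim = 4, 3, 2

X = tf.random.normal(shape=(batch_size, in_dim))  # input batch
W = tf.random.normal(shape=(in_dim, out_dim))     # weight matrix
b = tf.zeros(shape=(out_dim,))                    # bias vector

# Pre-activation output of the dense layer; the bias broadcasts across the batch
Z = tf.matmul(X, W) + b
print(Z.shape)  # (4, 2)
```
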
234 | 234 | },
235 | 235 | "outputs": [],
236 | 236 | "source": [
237 |     | - "sns.countplot(y_viz.numpy());\n",
    | 237 | + "sns.countplot(x=y_viz.numpy());\n",
238 | 238 | "plt.xlabel('Digits')\n",
239 | 239 | "plt.title(\"MNIST Digit Distribution\");"
240 | 240 | ]
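
The keyword form matters because recent seaborn releases treat the first positional argument of `countplot` as `data` rather than `x`, so passing the label array explicitly as `x=` keeps the intended per-digit counts. If seaborn is unavailable, roughly the same plot can be produced with NumPy and Matplotlib; this sketch assumes `y_viz` is a tensor of integer digit labels, as in the notebook:

```python
import numpy as np
import matplotlib.pyplot as plt

# Count how many examples there are of each digit (y_viz holds integer labels)
digits, counts = np.unique(y_viz.numpy(), return_counts=True)

plt.bar(digits, counts)
plt.xticks(digits)
plt.xlabel('Digits')
plt.title("MNIST Digit Distribution")
plt.show()
```
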
386 | 386 | "    if not self.built:\n",
387 | 387 | "      # Infer the input dimension based on first call\n",
388 | 388 | "      self.in_dim = x.shape[1]\n",
389 |     | - "      # Initialize the weights and biases using Xavier scheme\n",
390 |     | - "      self.w = tf.Variable(xavier_init(shape=(self.in_dim, self.out_dim)))\n",
    | 389 | + "      # Initialize the weights and biases\n",
    | 390 | + "      self.w = tf.Variable(self.weight_init(shape=(self.in_dim, self.out_dim)))\n",
391 | 391 | "      self.b = tf.Variable(tf.zeros(shape=(self.out_dim,)))\n",
392 | 392 | "      self.built = True\n",
393 | 393 | "    # Compute the forward pass\n",
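
Replacing the hard-coded `xavier_init` call with `self.weight_init` suggests the initializer is now supplied through the layer's constructor rather than baked in. The constructor sits outside this hunk, so the following is only a sketch of how such a lazily built layer could be wired together; the `DenseLayer` name, the constructor signature, and the Xavier/Glorot initializer below are assumptions rather than lines from the notebook:

```python
import tensorflow as tf

def xavier_init(shape):
  # Glorot/Xavier uniform initialization: bound by sqrt(6 / (fan_in + fan_out))
  in_dim, out_dim = shape
  limit = tf.sqrt(6.0 / tf.cast(in_dim + out_dim, tf.float32))
  return tf.random.uniform(shape=(in_dim, out_dim), minval=-limit, maxval=limit)

class DenseLayer(tf.Module):
  def __init__(self, out_dim, weight_init=xavier_init, activation=tf.identity):
    # Hypothetical constructor: the weight initializer is injected, not hard-coded
    self.out_dim = out_dim
    self.weight_init = weight_init
    self.activation = activation
    self.built = False

  def __call__(self, x):
    if not self.built:
      # Defer weight creation until the input width is known from the first batch
      self.in_dim = x.shape[1]
      self.w = tf.Variable(self.weight_init(shape=(self.in_dim, self.out_dim)))
      self.b = tf.Variable(tf.zeros(shape=(self.out_dim,)))
      self.built = True
    # Affine transform followed by the (optional) activation
    return self.activation(tf.add(tf.matmul(x, self.w), self.b))
```

A layer built this way still defaults to Xavier initialization while letting callers swap in another scheme, e.g. `DenseLayer(out_dim=10, weight_init=tf.random.normal)`.
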
875 | 875 | "  label_ind = (y_test == label)\n",
876 | 876 | "  # extract predictions for specific true label\n",
877 | 877 | "  pred_label = test_classes[label_ind]\n",
878 |     | - "  label_filled = tf.cast(tf.fill(pred_label.shape[0], label), tf.int64)\n",
    | 878 | + "  labels = y_test[label_ind]\n",
879 | 879 | "  # compute class-wise accuracy\n",
880 |     | - "  label_accs[accuracy_score(pred_label, label_filled).numpy()] = label\n",
    | 880 | + "  label_accs[accuracy_score(pred_label, labels).numpy()] = label\n",
881 | 881 | "for key in sorted(label_accs):\n",
882 | 882 | "  print(f\"Digit {label_accs[key]}: {key:.3f}\")"
883 | 883 | ]
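
Slicing the true labels with the same boolean mask (`y_test[label_ind]`) yields exactly the values the removed `tf.fill`/`tf.cast` pair was constructing, without having to match dtypes by hand. A self-contained toy version of the per-class accuracy pattern; the `accuracy_score` helper here is assumed to mirror the one defined earlier in the notebook:

```python
import tensorflow as tf

def accuracy_score(y_pred, y_true):
  # Fraction of predictions that match the true labels
  return tf.reduce_mean(tf.cast(y_pred == y_true, tf.float32))

# Toy stand-ins for the notebook's test_classes / y_test
y_pred = tf.constant([0, 1, 1, 1, 2, 2], dtype=tf.int64)
y_true = tf.constant([0, 0, 1, 1, 1, 2], dtype=tf.int64)

for label in range(3):
  label_ind = (y_true == label)
  # Boolean-mask both tensors so predictions and labels line up per class
  acc = accuracy_score(y_pred[label_ind], y_true[label_ind])
  print(f"Class {label}: {acc.numpy():.3f}")
```
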
906 | 906 | "  plt.figure(figsize=(10,10))\n",
907 | 907 | "  confusion = sk_metrics.confusion_matrix(test_labels.numpy(), \n",
908 | 908 | "                                          test_classes.numpy())\n",
909 |     | - "  confusion_normalized = confusion / confusion.sum(axis=1)\n",
    | 909 | + "  confusion_normalized = confusion / confusion.sum(axis=1, keepdims=True)\n",
910 | 910 | "  axis_labels = range(10)\n",
911 | 911 | "  ax = sns.heatmap(\n",
912 | 912 | "      confusion_normalized, xticklabels=axis_labels, yticklabels=axis_labels,\n",
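
The `keepdims=True` addition is what makes this a true row-wise normalization. Without it, `confusion.sum(axis=1)` has shape `(10,)` and NumPy broadcasting divides each column by that vector, so rows no longer sum to 1; keeping the dimension gives shape `(10, 1)` and divides each row by its own total. A small NumPy demonstration with a made-up 2x2 matrix:

```python
import numpy as np

confusion = np.array([[8, 2],
                      [1, 4]])

# Without keepdims the row sums broadcast along the wrong axis (columns get scaled)
wrong = confusion / confusion.sum(axis=1)
print(wrong.sum(axis=1))  # [1.2 0.9] -- rows no longer sum to 1

# With keepdims each row is divided by its own total
right = confusion / confusion.sum(axis=1, keepdims=True)
print(right.sum(axis=1))  # [1. 1.]
```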