190 | 190 | },
191 | 191 | "outputs": [],
192 | 192 | "source": [
193 |     | - "def construct_nn(ann_input, ann_output):\n",
    | 193 | + "def construct_nn():\n",
194 | 194 | "    n_hidden = 5\n",
195 | 195 | "\n",
196 | 196 | "    # Initialize random weights between each layer\n",

204 | 204 | "        \"train_cols\": np.arange(X_train.shape[1]),\n",
205 | 205 | "        \"obs_id\": np.arange(X_train.shape[0]),\n",
206 | 206 | "    }\n",
    | 207 | + " \n",
207 | 208 | "    with pm.Model(coords=coords) as neural_network:\n",
208 |     | - "        ann_input = pm.Data(\"ann_input\", X_train, dims=(\"obs_id\", \"train_cols\"))\n",
209 |     | - "        ann_output = pm.Data(\"ann_output\", Y_train, dims=\"obs_id\")\n",
    | 209 | + "        # Define minibatch variables\n",
    | 210 | + "        minibatch_x, minibatch_y = pm.Minibatch(X_train, Y_train, batch_size=50)\n",
    | 211 | + " \n",
    | 212 | + "        # Define data variables using minibatches\n",
    | 213 | + "        ann_input = pm.Data(\"ann_input\", minibatch_x, mutable=True, dims=(\"obs_id\", \"train_cols\"))\n",
    | 214 | + "        ann_output = pm.Data(\"ann_output\", minibatch_y, mutable=True, dims=\"obs_id\")\n",
210 | 215 | "\n",
211 | 216 | "        # Weights from input to hidden layer\n",
212 | 217 | "        weights_in_1 = pm.Normal(\n",

231 | 236 | "            \"out\",\n",
232 | 237 | "            act_out,\n",
233 | 238 | "            observed=ann_output,\n",
234 |     | - "            total_size=Y_train.shape[0], # IMPORTANT for minibatches\n",
    | 239 | + "            total_size=X_train.shape[0], # IMPORTANT for minibatches\n",
235 | 240 | "            dims=\"obs_id\",\n",
236 | 241 | "        )\n",
237 | 242 | "    return neural_network\n",
238 | 243 | "\n",
239 |     | - "\n",
240 |     | - "neural_network = construct_nn(X_train, Y_train)"
    | 244 | + "# Create the neural network model\n",
    | 245 | + "neural_network = construct_nn()\n"
241 | 246 | ]
242 | 247 | },
243 | 248 | {