Skip to content

Commit 0bf92f0

Browse files
committed
Fix indent
1 parent e4dfc2c commit 0bf92f0

File tree

1 file changed

+15
-15
lines changed

1 file changed

+15
-15
lines changed

chapter_programming_model/Neural_Network_Programming.md

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -81,21 +81,21 @@ and `torch.nn.Module` in PyTorch. Code
8181
**ch02/code2.3.2**
8282
```python
8383
class MLP(nn.Module):
    """A multi-layer perceptron with one hidden layer.

    Architecture: Linear -> BatchNorm1d -> ReLU -> Dropout -> Linear.
    Produces raw class scores (logits); no softmax is applied here.
    """

    def __init__(self, input_size, hidden_size, num_classes, dropout_rate=0.5):
        """Build the network layers.

        Args:
            input_size: Number of input features per sample.
            hidden_size: Width of the hidden layer.
            num_classes: Number of output classes (logit dimension).
            dropout_rate: Probability of zeroing a hidden activation
                during training (default 0.5).
        """
        # Python 3 zero-argument super() — equivalent to the legacy
        # super(MLP, self).__init__() but not tied to the class name.
        super().__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        # BatchNorm1d normalizes over the batch dimension; note it needs
        # batch_size > 1 in training mode.
        self.bn1 = nn.BatchNorm1d(hidden_size)
        self.relu = nn.ReLU()
        self.dropout = nn.Dropout(dropout_rate)
        self.fc2 = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        """Run a forward pass.

        Args:
            x: Input tensor of shape (batch, input_size).

        Returns:
            Logits tensor of shape (batch, num_classes).
        """
        out = self.fc1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.dropout(out)
        out = self.fc2(out)
        return out
9999
```
100100

101101
Figure :numref:`ch03/model_build` demonstrates the intricate process of

0 commit comments

Comments
 (0)