feat: add dropout and weight decay to prevent overfitting

Co-authored-by: aider (gemini/gemini-2.5-pro-preview-05-06) <aider@aider.chat>
2025-07-31 17:18:40 -06:00
parent c8f57818d1
commit 05ac4be541
2 changed files with 4 additions and 1 deletion


@@ -67,6 +67,7 @@ class GarageDoorCNN(nn.Module):
         self.fc1 = nn.Linear(self.fc1_input_features, 512)
         self.relu4 = nn.ReLU()
+        self.dropout = nn.Dropout(0.5) # Add dropout with 50% probability
         self.fc2 = nn.Linear(512, 2) # 2 classes: open, closed

     def forward(self, x):
@@ -75,5 +76,6 @@ class GarageDoorCNN(nn.Module):
         x = self.pool3(self.relu3(self.conv3(x)))
         x = x.view(-1, self.fc1_input_features) # Flatten the tensor
         x = self.relu4(self.fc1(x))
+        x = self.dropout(x) # Apply dropout before the final layer
         x = self.fc2(x)
         return x
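
The diff above covers only the dropout half of the commit; the weight-decay change sits in the second changed file, which is not shown here. In PyTorch, weight decay is typically passed to the optimizer rather than the model, so the training script most likely changed along these lines. A minimal sketch under that assumption; the optimizer choice, learning rate, and weight_decay value are illustrative, not taken from the commit:

import torch.optim as optim

model = GarageDoorCNN()

# Hypothetical training-script change: weight_decay applies an L2 penalty
# to the weights at every optimizer step, shrinking them toward zero.
optimizer = optim.Adam(
    model.parameters(),
    lr=1e-4,            # illustrative value, not from the commit
    weight_decay=1e-4,  # illustrative value, not from the commit
)

Note that nn.Dropout is only active in training mode: call model.train() before the training loop and model.eval() during validation so dropout is disabled at inference time.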