Update newapi.py
newapi.py
CHANGED
@@ -28,7 +28,7 @@ transform = transforms.Compose([
     transforms.Resize((224, 224)),
     transforms.Grayscale(num_output_channels=1),  # Ensure grayscale
     transforms.ToTensor(),
-    transforms.Normalize(mean=[0.
+    transforms.Normalize(mean=[0.286], std=[0.229]),  # Adjust mean/std if needed
 ])

 # Define the exact same model used during training
@@ -41,7 +41,7 @@ class BrainTumorModel(nn.Module):
         self.con2d = nn.Conv2d(32, 64, kernel_size=3)
         self.con3d = nn.Conv2d(64, 128, kernel_size=3)
         self.pool = nn.MaxPool2d(2)
-        self.fc1 = nn.Linear(128 *
+        self.fc1 = nn.Linear(128 * 28 * 28, 512)  # Match the saved model's input size
         self.fc2 = nn.Linear(512, 256)
         self.output = nn.Linear(256, 4)  # 4 classes expected

@@ -49,7 +49,7 @@ class BrainTumorModel(nn.Module):
         x = self.pool(torch.relu(self.con1d(x)))
         x = self.pool(torch.relu(self.con2d(x)))
         x = self.pool(torch.relu(self.con3d(x)))
-        x = x.view(-1, 128 * 
+        x = x.view(-1, 128 * 28 * 28)  # Flatten the feature maps
         x = torch.relu(self.fc1(x))
         x = torch.relu(self.fc2(x))
         x = self.output(x)
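Note on the 128 * 28 * 28 figure: with 224x224 grayscale input, three unpadded kernel_size=3 convolutions each followed by MaxPool2d(2) produce 26x26 feature maps (224 -> 111 -> 54 -> 26), while 28x28 (224 -> 112 -> 56 -> 28) corresponds to padding=1 convolutions, presumably how the saved checkpoint was trained. A minimal sketch, using a hypothetical helper that is not part of newapi.py, for deriving the flatten size from the layers themselves instead of hard-coding it:

import torch
import torch.nn as nn

def infer_flat_features(model, in_channels=1, size=224):
    # Hypothetical helper (not in the commit): trace a dummy tensor through the
    # conv/pool stack and count the resulting elements.
    with torch.no_grad():
        x = torch.zeros(1, in_channels, size, size)
        x = model.pool(torch.relu(model.con1d(x)))
        x = model.pool(torch.relu(model.con2d(x)))
        x = model.pool(torch.relu(model.con3d(x)))
    return x.numel()  # 128 * 26 * 26 with the unpadded convs shown above

# Usage sketch: size fc1 to whatever the conv stack actually produces,
# e.g. model.fc1 = nn.Linear(infer_flat_features(model), 512)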