SahithiR committed
Commit 6cee2f4
1 Parent(s): 977ae1b

Create CUSTOMRESNET.py

Files changed (1)
  1. CUSTOMRESNET.py +94 -0
CUSTOMRESNET.py ADDED
@@ -0,0 +1,94 @@
+ import torch.nn.functional as F
+ import torch
+ import torch.nn as nn
+
+ # Custom ResNet-style CNN for 32x32x3 inputs (e.g. CIFAR-10) with 10 output classes.
+ class Net(nn.Module):
+     def __init__(self):
+         super(Net, self).__init__()
+         # drop = 0.01
+         # Preparation layer: 32x32x3 -> 32x32x64
+         self.conv1 = nn.Sequential(
+             nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.BatchNorm2d(64),
+             nn.ReLU(inplace=True)
+         )  # number of parameters = 3*3*3*64 = 1728
+         # Layer 1: 32x32x64 -> 16x16x128
+         self.conv11 = nn.Sequential(
+             nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.MaxPool2d(kernel_size=2, stride=2),
+             nn.BatchNorm2d(128),
+             nn.ReLU(inplace=True)
+         )  # number of parameters = 3*3*64*128 = 73728
+         # Residual block for layer 1 (keeps 16x16x128)
+         self.conv12 = nn.Sequential(
+             nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1, bias=False),  # 3*3*128*128 = 147456
+             nn.BatchNorm2d(128),
+             nn.ReLU(inplace=True),
+             nn.Conv2d(128, 128, kernel_size=3, stride=1, padding=1, bias=False),  # 3*3*128*128 = 147456
+             nn.BatchNorm2d(128),
+             nn.ReLU(inplace=True)
+         )
+
+         # Layer 2: 16x16x128 -> 8x8x256
+         self.conv2 = nn.Sequential(
+             nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.MaxPool2d(kernel_size=2, stride=2),
+             nn.BatchNorm2d(256),
+             nn.ReLU(inplace=True)
+         )
+
+         # Layer 3: 8x8x256 -> 4x4x512
+         self.conv31 = nn.Sequential(
+             nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.MaxPool2d(kernel_size=2, stride=2),
+             nn.BatchNorm2d(512),
+             nn.ReLU(inplace=True)
+         )
+         # Residual block for layer 3 (keeps 4x4x512)
+         self.conv32 = nn.Sequential(
+             nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.BatchNorm2d(512),
+             nn.ReLU(inplace=True),
+             nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1, bias=False),
+             nn.BatchNorm2d(512),
+             nn.ReLU(inplace=True)
+         )
+
+         # 4x4x512 -> 1x1x512
+         self.maxpool = nn.MaxPool2d(kernel_size=4, stride=2)
+
+         # Fully connected classifier
+         self.fc = nn.Linear(512, 10, bias=True)
+
+     def forward(self, x):
+         x = self.conv1(x)
+
+         x = self.conv11(x)
+         R1 = x
+         x = self.conv12(x)
+         x = x + R1  # residual connection around conv12
+
+         x = self.conv2(x)
+
+         x = self.conv31(x)
+         R2 = x
+         x = self.conv32(x)
+         x = x + R2  # residual connection around conv32
+
+         x = self.maxpool(x)
+
+         # Flatten the 1x1 spatial dimensions: [N, 512, 1, 1] -> [N, 512]
+         x = x.view(x.size(0), -1)
+
+         x = self.fc(x)
+
+         x = x.view(-1, 10)
+         return x
+         # y = F.log_softmax(x, dim=-1)
+         # return y
+
+
+ def model_summary(model, input_size):
+     # summary() is assumed to come from the torchsummary package (pip install torchsummary)
+     from torchsummary import summary
+     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+     model = model.to(device)
+     summary(model, input_size=input_size)
+     return model, input_size
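
For reference, a minimal usage sketch (not part of the committed file), assuming the module is importable as CUSTOMRESNET and CIFAR-10-sized 3x32x32 inputs; a forward pass through Net should return logits of shape [batch, 10]:

import torch
from CUSTOMRESNET import Net

model = Net()
x = torch.randn(4, 3, 32, 32)   # random batch of four 32x32 RGB images
logits = model(x)
print(logits.shape)             # expected: torch.Size([4, 10])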