File size: 1,123 Bytes
a4a31bd d14b595 9039685 d14b595 9039685 a4a31bd 9039685 a4a31bd 9039685 a4a31bd 9039685 a4a31bd 9039685 a4a31bd 9039685 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 |
import torch
import torch.nn as nn
import torch.nn.functional as F
class SeizureDetector(nn.Module):
    """CNN classifier for seizure detection on single-channel 32x32 inputs.

    Architecture: two conv -> batchnorm -> ReLU -> maxpool stages, then
    three fully-connected layers with 50% dropout between them. Returns
    raw logits; apply softmax / CrossEntropyLoss externally.

    Args:
        num_classes: Number of output classes (default 2: seizure / no seizure).
    """

    def __init__(self, num_classes=2):
        # BUG FIX: the original defined `init` (and called `.init()` on super)
        # instead of `__init__`, so no layers were ever registered and
        # forward() crashed on the first attribute access.
        super().__init__()
        # Input assumed (N, 1, 32, 32) — inferred from the fc1 size; confirm upstream.
        self.conv1 = nn.Conv2d(1, 32, kernel_size=3, stride=1, padding=1)   # -> (32, 32, 32)
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)                   # halves H and W
        self.conv2 = nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1)  # -> (64, 16, 16) pre-pool
        # Batch normalization after each conv stabilizes training.
        self.bn1 = nn.BatchNorm2d(32)
        self.bn2 = nn.BatchNorm2d(64)
        self.dropout = nn.Dropout(p=0.5)  # dropout with a probability of 50%
        self.fc1 = nn.Linear(64 * 8 * 8, 120)  # 64 channels at 8x8 after two pools
        self.fc2 = nn.Linear(120, 32)
        self.fc3 = nn.Linear(32, num_classes)

    def forward(self, x):
        """Compute class logits.

        Args:
            x: Input tensor of shape (N, 1, 32, 32).

        Returns:
            Logits tensor of shape (N, num_classes).
        """
        x = self.pool(F.relu(self.bn1(self.conv1(x))))  # (N, 32, 16, 16)
        x = self.pool(F.relu(self.bn2(self.conv2(x))))  # (N, 64, 8, 8)
        x = torch.flatten(x, 1)                         # (N, 64*8*8)
        x = self.dropout(F.relu(self.fc1(x)))           # apply dropout
        x = self.dropout(F.relu(self.fc2(x)))           # apply dropout
        x = self.fc3(x)
        return x
|