ShabGaming committed on
Commit
1900eb9
1 parent: 01cc889

Source Files For The Model

Files changed (1)
  1. BrainTumorMRIDetection.ipynb +232 -0
BrainTumorMRIDetection.ipynb ADDED
@@ -0,0 +1,232 @@
+ {
+ "cells": [
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Brain Tumor MRI Detection"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pip install tensorflow"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import tensorflow as tf\n",
+ "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n",
+ "from tensorflow.keras.models import load_model\n",
+ "from tensorflow.keras.preprocessing import image\n",
+ "import os\n",
+ "import numpy as np\n",
+ "from PIL import Image"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Current Directory: e:\\Github Projects\\BrainTumorMRIDetection\n"
+ ]
+ }
+ ],
+ "source": [
+ "current_dir = os.getcwd()\n",
+ "print (\"Current Directory: \" + current_dir)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "train_dir = os.path.join(current_dir, 'Testing')\n",
+ "val_dir = os.path.join(current_dir, 'Training')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Define the target size and batch size\n",
+ "target_size = (1250, 1250)\n",
+ "batch_size = 32\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Found 394 images belonging to 4 classes.\n",
+ "Found 2870 images belonging to 4 classes.\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Define the training and validation data generators\n",
+ "train_datagen = ImageDataGenerator(rescale=1./255)\n",
+ "train_generator = train_datagen.flow_from_directory(\n",
+ " train_dir,\n",
+ " target_size=target_size,\n",
+ " batch_size=batch_size,\n",
+ " class_mode='categorical')\n",
+ "\n",
+ "val_datagen = ImageDataGenerator(rescale=1./255)\n",
+ "val_generator = val_datagen.flow_from_directory(\n",
+ " val_dir,\n",
+ " target_size=target_size,\n",
+ " batch_size=batch_size,\n",
+ " class_mode='categorical')\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Define the model\n",
+ "model = tf.keras.models.Sequential([\n",
+ " tf.keras.layers.Conv2D(32, (3, 3), activation='relu',\n",
+ " input_shape=(target_size[0], target_size[1], 3)),\n",
+ " tf.keras.layers.MaxPooling2D((2, 2)),\n",
+ " tf.keras.layers.Conv2D(64, (3, 3), activation='relu'),\n",
+ " tf.keras.layers.MaxPooling2D((2, 2)),\n",
+ " tf.keras.layers.Conv2D(128, (3, 3), activation='relu'),\n",
+ " tf.keras.layers.MaxPooling2D((2, 2)),\n",
+ " tf.keras.layers.Flatten(),\n",
+ " tf.keras.layers.Dense(128, activation='relu'),\n",
+ " tf.keras.layers.Dropout(0.5),\n",
+ " tf.keras.layers.Dense(train_generator.num_classes, activation='softmax')\n",
+ "])\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Compile the model\n",
+ "model.compile(optimizer='adam',\n",
+ " loss='categorical_crossentropy',\n",
+ " metrics=['accuracy'])\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 1/15\n",
+ "12/12 [==============================] - 753s 64s/step - loss: 17.4665 - accuracy: 0.2431 - val_loss: 1.4717 - val_accuracy: 0.2883\n",
+ "Epoch 2/15\n",
+ "12/12 [==============================] - 694s 60s/step - loss: 1.1907 - accuracy: 0.4779 - val_loss: 1.4602 - val_accuracy: 0.2798\n",
+ "Epoch 3/15\n",
+ "12/12 [==============================] - 704s 61s/step - loss: 0.8829 - accuracy: 0.6575 - val_loss: 1.5343 - val_accuracy: 0.2791\n",
+ "Epoch 4/15\n",
+ "12/12 [==============================] - 697s 61s/step - loss: 0.4633 - accuracy: 0.8398 - val_loss: 1.7458 - val_accuracy: 0.3206\n",
+ "Epoch 5/15\n",
+ "12/12 [==============================] - 690s 60s/step - loss: 0.2428 - accuracy: 0.9309 - val_loss: 2.3506 - val_accuracy: 0.3536\n",
+ "Epoch 6/15\n",
+ "12/12 [==============================] - 698s 61s/step - loss: 0.1575 - accuracy: 0.9558 - val_loss: 2.2596 - val_accuracy: 0.3588\n",
+ "Epoch 7/15\n",
+ "12/12 [==============================] - 694s 61s/step - loss: 0.1069 - accuracy: 0.9696 - val_loss: 1.9421 - val_accuracy: 0.3272\n",
+ "Epoch 8/15\n",
+ "12/12 [==============================] - 694s 61s/step - loss: 0.0688 - accuracy: 0.9807 - val_loss: 3.2596 - val_accuracy: 0.3711\n",
+ "Epoch 9/15\n",
+ "12/12 [==============================] - 685s 61s/step - loss: 0.1024 - accuracy: 0.9696 - val_loss: 2.0157 - val_accuracy: 0.3722\n",
+ "Epoch 10/15\n",
+ "12/12 [==============================] - 699s 61s/step - loss: 0.0556 - accuracy: 0.9890 - val_loss: 2.7399 - val_accuracy: 0.3430\n",
+ "Epoch 11/15\n",
+ "12/12 [==============================] - 696s 61s/step - loss: 0.0561 - accuracy: 0.9862 - val_loss: 2.4300 - val_accuracy: 0.3831\n",
+ "Epoch 12/15\n",
+ "12/12 [==============================] - 684s 60s/step - loss: 0.0320 - accuracy: 0.9917 - val_loss: 2.5653 - val_accuracy: 0.3511\n",
+ "Epoch 13/15\n",
+ "12/12 [==============================] - 681s 61s/step - loss: 0.0493 - accuracy: 0.9890 - val_loss: 2.8736 - val_accuracy: 0.3515\n",
+ "Epoch 14/15\n",
+ "12/12 [==============================] - 689s 60s/step - loss: 0.0213 - accuracy: 0.9917 - val_loss: 3.0044 - val_accuracy: 0.3704\n",
+ "Epoch 15/15\n",
+ "12/12 [==============================] - 692s 62s/step - loss: 0.0407 - accuracy: 0.9917 - val_loss: 2.8754 - val_accuracy: 0.3838\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Train the model\n",
+ "history = model.fit(\n",
+ " train_generator,\n",
+ " steps_per_epoch=train_generator.samples//batch_size,\n",
+ " epochs=15,\n",
+ " validation_data=val_generator,\n",
+ " validation_steps=val_generator.samples//batch_size)"
+ ]
+ },
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "Saving The Model"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Save the trained model in the current directory\n",
+ "model.save(os.path.join(current_dir, 'model.h5'))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "base",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.8.8"
+ },
+ "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+ }
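
The committed cells import load_model, image, numpy, and PIL but never call them, presumably leaving inference to a separate step; note also that train_dir points at the 'Testing' folder (394 images) and val_dir at 'Training' (2870 images). A minimal inference sketch, assuming the saved model.h5, the same 1250x1250 input size and 1/255 rescaling used during training, and a placeholder image path sample.jpg, might look like this:

import os
import numpy as np
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image

current_dir = os.getcwd()
model = load_model(os.path.join(current_dir, 'model.h5'))

# flow_from_directory assigns class indices from the sorted subfolder names,
# so recover them from the same folder the notebook trained on ('Testing').
train_dir = os.path.join(current_dir, 'Testing')
class_names = sorted(
    d for d in os.listdir(train_dir) if os.path.isdir(os.path.join(train_dir, d))
)

# Preprocess one scan the same way as training: resize to 1250x1250, rescale by 1/255.
img = image.load_img(os.path.join(current_dir, 'sample.jpg'), target_size=(1250, 1250))
x = np.expand_dims(image.img_to_array(img) / 255.0, axis=0)

probs = model.predict(x)[0]
print(class_names[int(np.argmax(probs))], float(np.max(probs)))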