Wildfire CNN — Accuracy: 95%
import itertools
import os
import pathlib
import random
import shutil
import time
import warnings

import numpy as np
from PIL import Image
# NOTE(review): later cells also use `cv2` (OpenCV) and `plt` (matplotlib);
# their imports are not visible in this chunk — confirm they exist elsewhere.

# Ignore Warnings
warnings.filterwarnings("ignore")
modules loaded
# NOTE(review): `plt`, `display_random_sample`, and `train_dir` are defined in
# an earlier notebook cell not visible in this chunk — confirm before running.
plt.show()
# Display one random sample from train, validation, and test sets
print("Training set samples:")
display_random_sample(train_dir)
# Load the training split. Each subdirectory of `dir` is a class label
# ('wildfire' / 'nowildfire'); every image is resized to 32x32 and scaled
# to [0, 1]. Produces parallel lists `x_train` (float images, BGR channel
# order from cv2) and `y_train` (directory-name labels).
dir = '/kaggle/input/wildfire-prediction-dataset/train'
x_train = []
y_train = []
for direct in os.listdir(dir):
    print("Loading dataset training {}".format(direct))
    for filename in os.listdir(os.path.join(dir, direct)):
        img_path = os.path.join(dir, direct, filename)
        img = cv2.imread(img_path)
        if img is None:
            # cv2.imread returns None for unreadable/corrupt files;
            # skip them instead of crashing inside cv2.resize.
            print("Skipping unreadable file: {}".format(img_path))
            continue
        img = cv2.resize(img, (32, 32))
        # cv2 already returns an ndarray, so the original np.array() call
        # was redundant; dividing by 255 yields a float array in [0, 1].
        img = img / 255
        x_train.append(img)
        y_train.append(direct)
# Load the validation split, mirroring the training loader: one class per
# subdirectory, images resized to 32x32 and scaled to [0, 1].
dir_val = '/kaggle/input/wildfire-prediction-dataset/valid'
x_val = []
y_val = []
for direct in os.listdir(dir_val):
    print("Loading dataset validation {}".format(direct))
    for filename in os.listdir(os.path.join(dir_val, direct)):
        img_path = os.path.join(dir_val, direct, filename)
        image = cv2.imread(img_path)
        if image is None:
            # Skip unreadable/corrupt files (cv2.imread returns None)
            # instead of crashing inside cv2.resize.
            print("Skipping unreadable file: {}".format(img_path))
            continue
        image = cv2.resize(image, (32, 32))
        # cv2 already returns an ndarray; np.array() was redundant.
        image = image / 255
        x_val.append(image)
        y_val.append(direct)
# Load the test split, mirroring the training loader: one class per
# subdirectory, images resized to 32x32 and scaled to [0, 1].
dir_test = '/kaggle/input/wildfire-prediction-dataset/test'
x_test = []
y_test = []
for direct in os.listdir(dir_test):
    print("Loading dataset test {}".format(direct))
    for filename in os.listdir(os.path.join(dir_test, direct)):
        img_path = os.path.join(dir_test, direct, filename)
        image = cv2.imread(img_path)
        if image is None:
            # Skip unreadable/corrupt files (cv2.imread returns None)
            # instead of crashing inside cv2.resize.
            print("Skipping unreadable file: {}".format(img_path))
            continue
        image = cv2.resize(image, (32, 32))
        # cv2 already returns an ndarray; np.array() was redundant.
        image = image / 255
        x_test.append(image)
        y_test.append(direct)
# Convert the label lists to NumPy arrays so downstream code can index and
# feed them to the model uniformly.
# Spot-checks recorded in the original notebook session:
#   y_train[30000] -> 'nowildfire'   (labels are the directory names)
#   len(x_train[4][4]) -> 32         (each image row has 32 pixels)
# NOTE(review): the string labels are never one-hot/binary encoded in this
# chunk, yet the model ends in Dense(1) — confirm encoding happens elsewhere.
y_train = np.array(y_train)
y_val = np.array(y_val)
y_test = np.array(y_test)
Flatten(),
Dense(128, activation='relu'),
Dense(128, activation='relu'),
BatchNormalization(),
Dropout(0.5),
Dense(64, activation='relu'),
Dense(32, activation='relu'),
BatchNormalization(),
model.summary()
Model: "sequential_20"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━
━━━━━┓
┃ Layer (type) ┃ Output Shape ┃
Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━
━━━━━┩
│ conv2d_133 (Conv2D) │ (None, 30, 30, 32) │
896 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_134 (Conv2D) │ (None, 30, 30, 64) │
18,496 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_135 (Conv2D) │ (None, 30, 30, 64) │
36,928 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ batch_normalization_59 │ (None, 30, 30, 64) │
256 │
│ (BatchNormalization) │ │
│
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ max_pooling2d_42 (MaxPooling2D) │ (None, 15, 15, 64) │
0 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_136 (Conv2D) │ (None, 15, 15, 128) │
73,856 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_137 (Conv2D) │ (None, 15, 15, 128) │
147,584 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ batch_normalization_60 │ (None, 15, 15, 128) │
512 │
│ (BatchNormalization) │ │
│
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ max_pooling2d_43 (MaxPooling2D) │ (None, 7, 7, 128) │
0 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_138 (Conv2D) │ (None, 7, 7, 256) │
295,168 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_139 (Conv2D) │ (None, 7, 7, 256) │
590,080 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ batch_normalization_61 │ (None, 7, 7, 256) │
1,024 │
│ (BatchNormalization) │ │
│
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ max_pooling2d_44 (MaxPooling2D) │ (None, 3, 3, 256) │
0 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_140 (Conv2D) │ (None, 3, 3, 128) │
295,040 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ conv2d_141 (Conv2D) │ (None, 3, 3, 128) │
147,584 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ flatten_20 (Flatten) │ (None, 1152) │
0 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dense_78 (Dense) │ (None, 128) │
147,584 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dense_79 (Dense) │ (None, 128) │
16,512 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ batch_normalization_62 │ (None, 128) │
512 │
│ (BatchNormalization) │ │
│
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dropout (Dropout) │ (None, 128) │
0 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dense_80 (Dense) │ (None, 64) │
8,256 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dense_81 (Dense) │ (None, 32) │
2,080 │
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ batch_normalization_63 │ (None, 32) │
128 │
│ (BatchNormalization) │ │
│
├─────────────────────────────────┼────────────────────────┼──────────
─────┤
│ dense_82 (Dense) │ (None, 1) │
33 │
└─────────────────────────────────┴────────────────────────┴──────────
─────┘
Epoch 1/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 20s 23ms/step - accuracy: 0.8795 - loss:
0.2910 - val_accuracy: 0.6083 - val_loss: 1.1794
Epoch 2/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 13ms/step - accuracy: 0.9252 - loss:
0.1913 - val_accuracy: 0.9024 - val_loss: 0.2299
Epoch 3/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9321 - loss:
0.1734 - val_accuracy: 0.5735 - val_loss: 0.9520
Epoch 4/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9377 - loss:
0.1574 - val_accuracy: 0.8211 - val_loss: 0.3225
Epoch 5/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9442 - loss:
0.1464 - val_accuracy: 0.9543 - val_loss: 0.1255
Epoch 6/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9477 - loss:
0.1359 - val_accuracy: 0.9540 - val_loss: 0.1189
Epoch 7/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 13ms/step - accuracy: 0.9456 - loss:
0.1413 - val_accuracy: 0.9365 - val_loss: 0.1692
Epoch 8/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9511 - loss:
0.1278 - val_accuracy: 0.9525 - val_loss: 0.1287
Epoch 9/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 13ms/step - accuracy: 0.9564 - loss:
0.1186 - val_accuracy: 0.8160 - val_loss: 0.3986
Epoch 10/20
473/473 ━━━━━━━━━━━━━━━━━━━━ 6s 12ms/step - accuracy: 0.9560 - loss:
0.1141 - val_accuracy: 0.9478 - val_loss: 0.1521