{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "_cell_guid": "b1076dfc-b9ad-4769-8c92-a6c4dae69d19", "_uuid": "8f2839f25d086af736a60e9eeb907d3b93b6e0e5" }, "outputs": [], "source": [ "\n", "import numpy as np \n", "import pandas as pd \n", "import cv2\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "_cell_guid": "79c7e3d0-c299-4dcb-8224-4455121ee9b0", "_uuid": "d629ff2d2480ee46fbb7e2d37f6b5fab8052498a" }, "outputs": [], "source": [ "df = pd.read_csv('../input/facial-expression/fer2013.csv')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "df.head()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "len(df.iloc[0]['pixels'].split())\n", "# 48 * 48" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "label_map = ['Anger', 'Neutral', 'Fear', 'Happy', 'Sad', 'Surprise']" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import matplotlib.pyplot as plt" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "img = df.iloc[0]['pixels'].split()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "img = [int(i) for i in img]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "type(img[0])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "len(img)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "img = np.array(img)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "img = img.reshape(48,48)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "img.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "plt.imshow(img, cmap='gray')\n", "plt.xlabel(df.iloc[0]['emotion'])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X = []\n", "y = []" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def getData(path):\n", " anger = 0\n", " fear = 0\n", " sad = 0\n", " happy = 0\n", " surprise = 0\n", " neutral = 0\n", " df = pd.read_csv(path)\n", " \n", " X = []\n", " y = [] \n", " \n", " for i in range(len(df)):\n", " if df.iloc[i]['emotion'] != 1:\n", " if df.iloc[i]['emotion'] == 0:\n", " if anger <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " anger += 1\n", " else:\n", " pass\n", " \n", " if df.iloc[i]['emotion'] == 2:\n", " if fear <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " fear += 1\n", " else:\n", " pass\n", " \n", " if df.iloc[i]['emotion'] == 3:\n", " if happy <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " happy += 1\n", " else:\n", " pass\n", " \n", " if df.iloc[i]['emotion'] == 4:\n", " if sad <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " sad += 1\n", " else:\n", " pass\n", " \n", " if df.iloc[i]['emotion'] == 5:\n", " if surprise <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " surprise += 1\n", " else:\n", " pass\n", " \n", " if df.iloc[i]['emotion'] == 6:\n", " if neutral <= 4000: \n", " y.append(df.iloc[i]['emotion'])\n", " im = df.iloc[i]['pixels']\n", " im = [int(x) for x in im.split()]\n", " X.append(im)\n", " neutral += 1\n", " else:\n", " pass\n", "\n", " \n", " \n", " return X, y \n", " " ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X, y = getData('../input/facial-expression/fer2013.csv')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.unique(y, return_counts=True)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X = np.array(X)/255.0\n", "y = np.array(y)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X.shape, y.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "y_o = []\n", "for i in y:\n", " if i != 6:\n", " y_o.append(i)\n", " \n", " else:\n", " y_o.append(1)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "np.unique(y_o, return_counts=True)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "for i in range(5):\n", " r = np.random.randint((1), 24000, 1)[0]\n", " plt.figure()\n", " plt.imshow(X[r].reshape(48,48), cmap='gray')\n", " plt.xlabel(label_map[y_o[r]])" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X = X.reshape(len(X), 48, 48, 1)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# no_of_images, height, width, coloar_map" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "X.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from keras.utils import to_categorical\n", "y_new = to_categorical(y_o, num_classes=6)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "len(y_o), y_new.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "y_o[150], y_new[150]" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from keras.models import Sequential\n", "from keras.layers import Dense , Activation , Dropout ,Flatten\n", "from keras.layers.convolutional import Conv2D\n", "from keras.layers.convolutional import MaxPooling2D\n", "from keras.metrics import categorical_accuracy\n", "from keras.models import model_from_json\n", "from keras.callbacks import ModelCheckpoint\n", "from keras.optimizers import *\n", "from keras.layers.normalization import BatchNormalization" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model = Sequential()\n", "\n", "\n", "input_shape = (48,48,1)\n", "\n", "\n", "model.add(Conv2D(64, (5, 5), input_shape=input_shape,activation='relu', padding='same'))\n", "model.add(Conv2D(64, (5, 5), padding='same'))\n", "model.add(BatchNormalization())\n", "model.add(Activation('relu'))\n", "model.add(MaxPooling2D(pool_size=(2, 2)))\n", "\n", "\n", "model.add(Conv2D(128, (5, 5),activation='relu',padding='same'))\n", "model.add(Conv2D(128, (5, 5),padding='same'))\n", "model.add(BatchNormalization())\n", "model.add(Activation('relu'))\n", "model.add(MaxPooling2D(pool_size=(2, 2)))\n", "\n", "model.add(Conv2D(256, (3, 3),activation='relu',padding='same'))\n", "model.add(Conv2D(256, (3, 3),activation='relu',padding='same'))\n", "model.add(BatchNormalization())\n", "model.add(MaxPooling2D(pool_size=(2, 2)))\n", "\n", "## (15, 15) ---> 30\n", "model.add(Flatten())\n", "model.add(Dense(6, activation='softmax'))\n", "\n", "model.compile(loss='categorical_crossentropy', metrics=['accuracy'],optimizer='adam')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model.fit(X, y_new, epochs=22, batch_size=64, shuffle=True, validation_split=0.2)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model.save('model.h5')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import cv2" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_img = cv2.imread('../input/happy-img-test/pexels-andrea-piacquadio-941693.jpg', 0)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_img.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_img = cv2.resize(test_img, (48,48))\n", "test_img.shape" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_img = test_img.reshape(1,48,48,1)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "model.predict(test_img)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# label_map = ['Anger', 'Neutral', 'Fear', 'Happy', 'Sad', 'Surprise']" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.3" } }, "nbformat": 4, "nbformat_minor": 4 }