23A475R committed on
Commit 2b8f874
Parent: 99b7bd9

Update app.py

Files changed (1): app.py (+18, -17)
app.py CHANGED
@@ -18,33 +18,34 @@ EMOTIONS = ['neutral', 'happiness', 'surprise', 'sadness', 'anger', 'disgust', '
 classifier = load_model(emotion_model_path)
 
 def predict_emotion(image):
+    # Convert the image to grayscale
+    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
 
-    faces = face_detection(image)
+    # Detect faces in the grayscale image
+    faces = face_detection.detectMultiScale(gray, scaleFactor=1.1,
+                                            minNeighbors=5, minSize=(30, 30),
+                                            flags=cv2.CASCADE_SCALE_IMAGE)
 
-    for face in faces:
-        x,y,w,h = face['box']
+    confidences = {}
+    for (x, y, w, h) in faces:
+        # Extract the region of interest (ROI)
+        roi_gray = gray[y:y+h, x:x+w]
+        roi_gray = cv2.resize(roi_gray, (48, 48), interpolation=cv2.INTER_AREA)
 
-        roi = image[y:y+h,x:x+w]
-
-        # Converting the region of interest to grayscale, and resize
-        roi_gray = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
-        roi_gray = cv2.resize(roi_gray,(48,48),interpolation=cv2.INTER_AREA)
-
-        img = roi_gray.astype('float')/255.0
+        # Preprocess the ROI for prediction
+        img = roi_gray.astype('float') / 255.0
         img = img_to_array(img)
-        img = np.expand_dims(img,axis=0)
+        img = np.expand_dims(img, axis=0)
 
-        prediction = classifier.predict(img)[0]
-        #top_indices = np.argsort(prediction)[-2:]
-        #top_emotion = top_indices[1]
-        #second_emotion = top_indices[0]
-        #label = emotions[top_emotion]
-        confidences = {emotions[i]: float(prediction[i]) for i in range(len(emotions))}
+        # Make a prediction for the emotion
+        prediction = emotion_classifier.predict(img)[0]
+        confidences = {EMOTIONS[i]: float(prediction[i]) for i in range(len(EMOTIONS))}
 
     return confidences
 
 
 
+
 demo = gr.Interface(
     fn = predict_emotion,
     inputs = gr.Image(type="numpy"),
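
For context, the changed hunk relies on names defined earlier in app.py, outside this diff: face_detection (now called as a cv2.CascadeClassifier via detectMultiScale) and the loaded emotion model, which the unchanged line binds to classifier while the new code predicts through emotion_classifier. Below is a minimal sketch of that setup, assuming an OpenCV Haar cascade and a Keras model; the model path and import style are placeholders, not part of this commit.

# Assumed setup for the names used in the new hunk (not part of this commit).
import cv2
from tensorflow.keras.models import load_model

# Haar-cascade detector, so face_detection.detectMultiScale(...) works as called above.
face_detection = cv2.CascadeClassifier(
    cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')

emotion_model_path = 'emotion_model.hdf5'  # placeholder path
classifier = load_model(emotion_model_path)
emotion_classifier = classifier  # the new hunk predicts through this alias

With that in place, predict_emotion returns a dict mapping each EMOTIONS entry to a float confidence for the last detected face, which is what the gr.Interface above passes to its output component.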