import torch
import cv2
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
from transformers import pipeline
import gradio as gr
from sklearn.cluster import KMeans
# Emotion detection pipeline for text (if any text is included in assets)
emotion_classifier = pipeline("text-classification", model="j-hartmann/emotion-english-distilroberta-base", return_all_scores=True)
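
# The text classifier above is loaded but never called anywhere below. As an
# illustrative sketch only (the helper name and wiring are assumptions, not
# part of the app's flow), this is one way it could be applied to any text
# accompanying an asset, such as a caption or alt text:
def classify_text_emotion(text):
    # With return_all_scores=True the pipeline returns one list of
    # {"label", "score"} dicts per input string
    scores = emotion_classifier(text)[0]
    top = max(scores, key=lambda s: s["score"])
    return f"{top['label']} ({top['score']:.2f})"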

# Function to analyze colors in an image
def analyze_colors(image):
    try:
        # Convert image to RGB if not already
        if image.mode != "RGB":
            image = image.convert("RGB")
        # Resize the image for faster processing (small, but still sufficient for color analysis)
        image = image.resize((150, 150))
        # Convert to a numpy array and reshape into a flat list of RGB pixels
        img_array = np.array(image)
        pixels = img_array.reshape((-1, 3))
        # Check that there are enough pixels to perform KMeans
        if len(pixels) < 5:
            return "Image has too few pixels for analysis"
        # Use KMeans to find the five dominant colors
        kmeans = KMeans(n_clusters=5, random_state=0, n_init=10)  # n_init set explicitly for consistent behavior across sklearn versions
        kmeans.fit(pixels)
        dominant_colors = kmeans.cluster_centers_
        # Plot the dominant colors for visualization, then close the figure so
        # repeated requests do not leave matplotlib figures open
        plt.figure(figsize=(8, 6))
        plt.imshow([dominant_colors.astype(int)])
        plt.axis('off')
        plt.show()
        plt.close()
        return dominant_colors
    except Exception as e:
        print(f"Error in analyze_colors: {e}")
        return None

# Function to detect emotions from colors (simplified emotion-color mapping)
def color_emotion_analysis(dominant_colors):
    try:
        emotions = []
        for color in dominant_colors:
            # Simple heuristic on mean channel brightness (0-255 scale):
            # darker tones suggest sadness, brighter tones suggest happiness
            brightness = np.mean(color)
            if brightness < 85:
                emotions.append("Sadness")
            elif brightness > 170:
                emotions.append("Happiness")
            else:
                emotions.append("Neutral")
        return emotions
    except Exception as e:
        print(f"Error in color_emotion_analysis: {e}")
        return ["Error analyzing emotions"]
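
# For example (illustrative values, not produced by the app): a near-black
# cluster centre like [30, 30, 30] maps to "Sadness", a bright one like
# [210, 215, 220] maps to "Happiness", and anything in between is "Neutral".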

# Function to analyze patterns and shapes using OpenCV
def analyze_patterns(image):
    try:
        # Convert to grayscale for edge detection (force RGB first so the
        # array always has three channels)
        gray_image = cv2.cvtColor(np.array(image.convert("RGB")), cv2.COLOR_RGB2GRAY)
        edges = cv2.Canny(gray_image, 100, 200)
        # Count edge pixels as a rough "chaos" metric
        num_edges = np.sum(edges > 0)
        if num_edges > 10000:  # Arbitrary threshold for "chaos"
            return "Chaotic patterns - possibly distress"
        else:
            return "Orderly patterns - possibly calm"
    except Exception as e:
        print(f"Error in analyze_patterns: {e}")
        return "Error analyzing patterns"

# Main function to process an image and analyze its emotional expression
def analyze_emotion_from_image(image):
    try:
        # Analyze colors
        dominant_colors = analyze_colors(image)
        if dominant_colors is None:
            return "Error analyzing colors"
        color_emotions = color_emotion_analysis(dominant_colors)
        # Analyze patterns
        pattern_analysis = analyze_patterns(image)
        return f"Color-based emotions: {', '.join(color_emotions)}\nPattern analysis: {pattern_analysis}"
    except Exception as e:
        return f"Error processing image: {str(e)}"

# Gradio interface to upload image files and perform analysis
# (type="pil" ensures the analysis functions receive a PIL Image, which they
# rely on via .mode, .convert and .resize)
iface = gr.Interface(fn=analyze_emotion_from_image, inputs=gr.Image(type="pil"), outputs="text")

# Launch the interface
if __name__ == "__main__":
    iface.launch()
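
# Minimal local smoke test (a sketch, not part of the app; "sample.jpg" is a
# hypothetical path you would replace with your own file):
#
#     from PIL import Image
#     print(analyze_emotion_from_image(Image.open("sample.jpg")))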