nehapasricha94 committed
Commit 7fda483
1 Parent(s): 088206d

Update app.py

Files changed (1)
app.py +16 -5
app.py CHANGED
@@ -175,11 +175,24 @@ def analyze_emotion_from_image(image):
     try:
         # Ensure the input image is a PIL image
         print(f"Initial input type: {type(image)}")
-        if isinstance(image, dict) and "path" in image:
-            image = Image.open(requests.get(image["path"], stream=True).raw)
+
+        # Check if the input is a URL (string)
+        if isinstance(image, str):
+            print(f"Loading image from URL: {image}")
+            response = requests.get(image, stream=True)
+            response.raise_for_status()  # Raise an error for bad responses
+            image = Image.open(response.raw).convert("RGB")  # Convert to RGB
             print("Loaded image from URL.")
+
+        # Check if the input is a dictionary (for blob data)
+        elif isinstance(image, dict) and "blob" in image:
+            blob_data = image["blob"]
+            image = Image.open(blob_data).convert("RGB")  # Convert to RGB
+            print("Loaded image from Blob data.")
+
+        # Check if the input is a NumPy array
         elif isinstance(image, np.ndarray):
-            image = Image.fromarray(image)
+            image = Image.fromarray(image).convert("RGB")  # Convert to RGB
             print("Converted image from NumPy array.")
 
         print(f"Image size: {image.size}, mode: {image.mode}")
@@ -206,8 +219,6 @@ def analyze_emotion_from_image(image):
 
 
 
-
-
 # Gradio interface to upload image files and perform analysis
 iface = gr.Interface(fn=analyze_emotion_from_image, inputs="image", outputs="text")
 
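A minimal sketch of how the updated input handling could be exercised outside the Gradio UI, assuming app.py is importable and that analyze_emotion_from_image returns a printable result (the diff does not show the rest of the function); the URL and file path below are placeholders, not files from this repo.

# Sketch only: exercises the three input branches added in this commit.
# Assumptions: app.py is importable without side effects, the function
# returns a printable result, and the URL / file path are placeholders.
import io

import numpy as np

from app import analyze_emotion_from_image

# 1) URL string: fetched with requests and decoded by PIL inside the function
print(analyze_emotion_from_image("https://example.com/face.jpg"))

# 2) Dict with a "blob" entry: any file-like object PIL can open
with open("face.jpg", "rb") as f:
    print(analyze_emotion_from_image({"blob": io.BytesIO(f.read())}))

# 3) NumPy array: what Gradio's "image" input passes by default
print(analyze_emotion_from_image(np.zeros((224, 224, 3), dtype=np.uint8)))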