import os
from PIL import Image
import google.generativeai as genai
import gradio as gr

# Configure Google API Key and model
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY")
genai.configure(api_key=GOOGLE_API_KEY)

MODEL_ID = "gemini-1.5-pro-latest"
model = genai.GenerativeModel(MODEL_ID)
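
# A second model instance carries the advocacy-focused system instruction;
# all analysis calls below go through example_model rather than the plain model above.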
example_model = genai.GenerativeModel(
    MODEL_ID,
    system_instruction=[
        "You are an advocate against gender-based violence.",
        "Analyze the content for signs of gender discrimination and provide actionable advice.",
    ],
)

# Set model parameters
generation_config = genai.GenerationConfig(
    temperature=0.9,
    top_p=1.0,
    top_k=32,
    candidate_count=1,
    max_output_tokens=8192,
)

# Safety settings: block content flagged at low severity or above in every harm category
safety_settings = {
    genai.types.HarmCategory.HARM_CATEGORY_HARASSMENT: genai.types.HarmBlockThreshold.BLOCK_LOW_AND_ABOVE,
    genai.types.HarmCategory.HARM_CATEGORY_HATE_SPEECH: genai.types.HarmBlockThreshold.BLOCK_LOW_AND_ABOVE,
    genai.types.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: genai.types.HarmBlockThreshold.BLOCK_LOW_AND_ABOVE,
    genai.types.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: genai.types.HarmBlockThreshold.BLOCK_LOW_AND_ABOVE,
}


def analyze_text(text: str) -> str:
    """Analyzes the given text for gender-based discrimination."""
    prompt = f"Analyze this text for any instances of gender-based discrimination and provide tips: {text}"
    response = example_model.generate_content(
        [prompt],
        generation_config=generation_config,
        safety_settings=safety_settings,
    )
    try:
        return response.text
    except ValueError:
        # response.text raises when the reply was blocked by the safety filters
        return "No response generated."


def analyze_image(image: Image.Image) -> str:
    """Analyzes the uploaded image for gender-based discrimination."""
    prompt = "Analyze this image for any instances of gender-based discrimination and provide actionable advice."
    resized_image = preprocess_image(image)  # Resize the image before sending it to the model
    response = example_model.generate_content(
        [prompt, resized_image],
        generation_config=generation_config,
        safety_settings=safety_settings,
    )
    try:
        return response.text
    except ValueError:
        # response.text raises when the reply was blocked by the safety filters
        return "No response generated."


def preprocess_image(image: Image.Image) -> Image.Image:
    """Resizes the image to a width of 512 pixels, maintaining its aspect ratio."""
    image_height = int(image.height * 512 / image.width)
    return image.resize((512, image_height))


# Example scenarios for gender discrimination analysis
example_scenarios = [
    "During a team meeting, whenever a female colleague tried to express her opinion, she was often interrupted or talked over by male colleagues.",
    "The feedback given to female employees often focuses more on their demeanor and less on their actual accomplishments.",
    "Male employees are more frequently considered for promotions and challenging projects, even when female employees have similar or superior qualifications.",
    "During a hiring panel, female candidates were often asked about their personal life, family plans, and how they would balance home and work.",
    "There are significant wage discrepancies between male and female employees who hold the same position and possess comparable experience.",
    "Some male colleagues often make inappropriate jokes or comments about female employees' appearances and attire.",
]
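
# Example image files; the relative paths assume ex1.jpg and ex2.png sit alongside app.py in the Space repository.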
example_images = [["ex1.jpg"], ["ex2.png"]]

css_style = """
body, .gradio-container {
    background-color: #020308; /* Replace with your preferred color */
}
#logo {
    display: flex;
    justify-content: center;
    font-size: 3em;
    font-weight: bold;
    letter-spacing: 3px;
}
.letter {
    opacity: 0;
    animation: fadeIn 0.1s forwards;
}
.letter.j { animation-delay: 0s;   color: #4285F4; } /* Blue */
.letter.u { animation-delay: 0.1s; color: #3A9CF1; }
.letter.s { animation-delay: 0.2s; color: #32B3EE; }
.letter.t { animation-delay: 0.3s; color: #2BC9EA; }
.letter.e { animation-delay: 0.4s; color: #23E0E7; }
.letter.v { animation-delay: 0.5s; color: #1BF7E4; }
.letter.a { animation-delay: 0.6s; color: #14F0B5; } /* Greenish */
@keyframes fadeIn {
    0%   { opacity: 0; transform: translateY(-20px); }
    100% { opacity: 1; transform: translateY(0); }
}
"""

# Gradio interface setup
with gr.Blocks(css=css_style) as app:
    gr.HTML("""
        <div id="logo">
            <span class="letter j">J</span>
            <span class="letter u">u</span>
            <span class="letter s">s</span>
            <span class="letter t">t</span>
            <span class="letter e">E</span>
            <span class="letter v">v</span>
            <span class="letter a">a</span>
        </div>
    """)
gr.Markdown("<h1 style='text-align: center; color:#f0f0f0;'>Promotes Gender Equality in Every Conversation</h1>") | |
gr.Markdown("<p style='text-align: center; font-size: 16px; color: #f0f0f0;'>Powered by Gemini to advocate against gender-based violence</p>") | |
with gr.Tab("Text Analysis"): | |
text_input = gr.Textbox(label="Enter Text or Select an Example", placeholder="Type here or select an example...", lines=4) | |
text_output = gr.Textbox(label="Analysis Output", lines=6) | |
analyze_text_btn = gr.Button("Analyze Text") | |
examples = gr.Examples( | |
examples=example_scenarios, | |
inputs=text_input, | |
outputs=text_output | |
) | |
analyze_text_btn.click(analyze_text, inputs=text_input, outputs=text_output) | |
with gr.Tab("Image Analysis"): | |
image_input = gr.Image(label="Upload Image(e.g., screenshot, photos, etc.)", type="pil") | |
image_output = gr.Textbox(label="Analysis Output", lines=6) | |
analyze_image_btn = gr.Button("Analyze Image") | |
examples = gr.Examples( | |
examples=example_images, | |
inputs=image_input, | |
outputs=image_output | |
) | |
analyze_image_btn.click(analyze_image, inputs=image_input, outputs=image_output) | |

app.launch()
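
# A minimal sketch of running this Space locally (assuming the file is saved as app.py and the
# google-generativeai, gradio, and pillow packages are installed):
#
#   export GOOGLE_API_KEY="your-api-key"
#   python app.py
#
# Gradio then serves the interface at http://127.0.0.1:7860 by default.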