import streamlit as st
from huggingface_hub import InferenceClient
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import os
from PyPDF2 import PdfReader
import docx
import re
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
from typing import Dict

def extract_cv_text(file):
    """Extract text from PDF or DOCX CV files."""
    if file is None:
        return "No CV uploaded"
    file_ext = os.path.splitext(file.name)[1].lower()
    text = ""
    try:
        if file_ext == '.pdf':
            reader = PdfReader(file)
            for page in reader.pages:
                # extract_text() can return None for image-only pages
                text += page.extract_text() or ""
        elif file_ext == '.docx':
            doc = docx.Document(file)
            for paragraph in doc.paragraphs:
                text += paragraph.text + '\n'
        else:
            return "Unsupported file format. Please upload PDF or DOCX files."
        return text
    except Exception as e:
        return f"Error processing file: {str(e)}"

# Read the Hugging Face access token from the API_KEY environment variable
access_token = os.getenv('API_KEY')
# Initialize the Inference API client used for text generation
client = InferenceClient(token=access_token)
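
# Note (assumption): when this app runs as a Hugging Face Space, API_KEY is
# expected to be configured as a Space secret so that os.getenv('API_KEY')
# resolves at runtime; for a local run the variable can simply be exported
# in the shell before starting the app.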

def create_email_prompt(job_description: str, cv_text: str) -> str:
    """Create a detailed prompt for email generation."""
    return f"""Job Description:
{job_description}

Your CV Details:
{cv_text}

Instructions: Write a professional job application email following these guidelines:
1. Start with a proper greeting
2. First paragraph: Express interest in the position and mention how you found it
3. Second paragraph: Highlight 2-3 most relevant experiences from your CV that match the job requirements
4. Third paragraph: Mention specific skills that align with the role
5. Closing paragraph: Express enthusiasm for an interview. Use the exact contact information provided in the CV - do not use placeholders like [phone] or [email]
6. End with a professional closing

Important: Use the exact contact details and information from the CV. Do not generate or make up any placeholder information.
Keep the tone professional, confident, and enthusiastic. Be concise but impactful.

Email:"""

def conversation_predict(input_text: str, cv_text: str):
    """Generate a response using the model with streaming output."""
    prompt = create_email_prompt(input_text, cv_text)
    # Use the streaming API
    try:
        for response in client.text_generation(
            model="google/gemma-2b-it",
            prompt=prompt,
            max_new_tokens=512,
            temperature=0.7,
            top_p=0.95,
            stream=True,
        ):
            # The streaming response returns text directly
            yield response
    except Exception as e:
        st.error(f"Error generating response: {str(e)}")
        yield ""

def respond(
    message: str,
    history: list[tuple[str, str]],
    system_message: str,
    cv_file,
    max_tokens: int,
    temperature: float,
    top_p: float,
):
    """Generate a response for a multi-turn chat conversation.

    Note: this helper is not currently invoked by the Streamlit UI below.
    """
    # Extract CV text and fold it into the system message
    cv_text = extract_cv_text(cv_file) if cv_file else "No CV provided"
    updated_system_message = f"""Task: Write a professional job application email.

CV Summary:
{cv_text}

{system_message}"""
    messages = [{"role": "system", "content": updated_system_message}]
    for user_input, assistant_reply in history:
        if user_input:
            messages.append({"role": "user", "content": user_input})
        if assistant_reply:
            messages.append({"role": "assistant", "content": assistant_reply})
    messages.append({"role": "user", "content": message})
    response = ""
    for message_chunk in client.chat_completion(
        messages=messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = message_chunk["choices"][0]["delta"].get("content", "") or ""
        response += token
        yield response
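
# Minimal usage sketch (assumption, not wired into the UI below): respond() can
# be smoke-tested with an empty chat history and no CV file, for example:
#   for partial in respond("Draft an email for this posting...", [], "Be concise.",
#                          None, max_tokens=256, temperature=0.7, top_p=0.95):
#       pass  # `partial` holds the accumulated reply so far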

# Function to send the email with the CV attached
def send_email(sender_email: str, receiver_email: str, subject: str, body: str, attachment_file=None):
    """Send the email, attaching the uploaded CV file if provided."""
    try:
        msg = MIMEMultipart()
        msg['From'] = sender_email
        msg['To'] = receiver_email
        msg['Subject'] = subject
        msg.attach(MIMEText(body, 'plain'))
        # Attach the uploaded CV (a Streamlit UploadedFile object held in memory)
        if attachment_file is not None:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(attachment_file.getvalue())
            encoders.encode_base64(part)
            part.add_header('Content-Disposition',
                            f'attachment; filename={os.path.basename(attachment_file.name)}')
            msg.attach(part)
        # Connect to the SMTP server and send the message
        with smtplib.SMTP('smtp.gmail.com', 587) as server:
            server.starttls()
            # Password is read from the EMAIL_PASSWORD environment variable
            server.login(sender_email, os.getenv('EMAIL_PASSWORD'))
            server.sendmail(sender_email, receiver_email, msg.as_string())
        st.success("Email sent successfully!")
    except Exception as e:
        st.error(f"Error sending email: {str(e)}")

# Streamlit UI section
st.title("AI Job Application Email Generator")

def update_ui(message, cv_file, cv_text):
    """Handle the UI updates for email generation."""
    # Keep the generated email in session state so it survives Streamlit reruns
    # (e.g. when the "Send Email" button triggers a rerun of the script)
    if "email_text" not in st.session_state:
        st.session_state.email_text = ""
    # Placeholder for the streamed email text
    email_placeholder = st.empty()
    # Generate button
    if st.button("Generate Email", key="generate_button"):
        if message and cv_file and isinstance(cv_text, str) and not cv_text.startswith("Error"):
            st.session_state.email_text = ""
            # Stream the response
            try:
                with st.spinner('Generating your application email...'):
                    for chunk in conversation_predict(message, cv_text):
                        if chunk:
                            st.session_state.email_text += chunk
                            # Redraw the text area with the text received so far
                            email_placeholder.text_area(
                                "Generated Email",
                                value=st.session_state.email_text,
                                height=400
                            )
                st.success('Email generated successfully!')
            except Exception as e:
                st.error(f"Error during email generation: {str(e)}")
        else:
            st.warning("Please upload a CV and enter a job description.")
    # Email input fields
    st.markdown("### Sender & Receiver Information")
    sender_email = st.text_input("Sender's Email Address")
    receiver_email = st.text_input("Receiver's Email Address")
    # Email subject
    subject = st.text_input("Subject", value="Job Application for [Position Name]")
    # Option to edit the generated email before sending
    email_body = st.text_area("Edit the Generated Email (if needed)", value=st.session_state.email_text, height=400)
    # Send email button
    if st.button("Send Email"):
        if sender_email and receiver_email and email_body and cv_file:
            send_email(sender_email, receiver_email, subject, email_body, cv_file)
        else:
            st.warning("Please provide sender, receiver, email body, and an uploaded CV.")

# Add tabs for different sections
tab1, tab2 = st.tabs(["Generate Email", "View CV Details"])

with tab1:
    # CV file upload
    cv_file = st.file_uploader("Upload CV (PDF or DOCX)", type=["pdf", "docx"])
    if cv_file:
        cv_text = extract_cv_text(cv_file)
        if isinstance(cv_text, str) and not cv_text.startswith("Error"):
            st.success("CV uploaded successfully!")
        else:
            st.error(cv_text)
            cv_text = None
    else:
        cv_text = None
    # Job description input
    st.markdown("### Job Description")
    message = st.text_area("Paste the job description here:", height=200)
    # Render the email generation controls
    update_ui(message, cv_file, cv_text)

with tab2:
    if cv_file and isinstance(cv_text, str) and not cv_text.startswith("Error"):
        st.markdown("### CV Content")
        st.text_area("Full CV Text", value=cv_text, height=400)
    else:
        st.info("Upload a CV to view content")