import os
from datetime import datetime

import gradio as gr
import pandas as pd
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM

# Emotion classifier: a T5 model fine-tuned to emit a single emotion label.
emotion_tokenizer = AutoTokenizer.from_pretrained("mrm8488/t5-base-finetuned-emotion")
emotion_model = AutoModelForSeq2SeqLM.from_pretrained("mrm8488/t5-base-finetuned-emotion")

# Quote generator: Gemma 2 2B (instruction-tuned). The checkpoint is gated, so a
# Hugging Face access token is read from the 'hftoken' environment variable.
token = os.getenv('hftoken')
text_tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-2b-it", token=token)
text_model = AutoModelForCausalLM.from_pretrained("google/gemma-2-2b-it", token=token)

# Run both models on the CPU.
device = torch.device('cpu')
emotion_model.to(device)
text_model.to(device)
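# Optional sketch (not part of the original setup): if a CUDA GPU with enough
# memory for Gemma 2 2B is available, the device could be chosen dynamically.
# device = torch.device("cuda" if torch.cuda.is_available() else "cpu")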


def get_emotion(text):
    # The T5 emotion model takes the raw text (with an explicit EOS token) and
    # generates a single emotion word such as "joy", "sadness", or "anger".
    input_ids = emotion_tokenizer.encode(text + '</s>', return_tensors='pt').to(device)
    output = emotion_model.generate(input_ids=input_ids, max_length=2)
    dec = [emotion_tokenizer.decode(ids, skip_special_tokens=True) for ids in output]
    label = dec[0].strip()
    return label
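# Quick sanity check (hypothetical example; uncomment to try it outside the app):
# print(get_emotion("I finally finished my project and it works!"))  # e.g. "joy"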


def generate_quote(original_text, emotion):
    # Prompt Gemma with the diary text and the detected emotion, then sample a
    # short continuation to use as the quote.
    input_text = f"Text: {original_text}\nEmotion: {emotion}\nInspirational Quote:"
    inputs = text_tokenizer(input_text, return_tensors="pt").to(device)
    outputs = text_model.generate(**inputs, max_new_tokens=70, do_sample=True, temperature=0.7)
    generated_text = text_tokenizer.decode(outputs[0], skip_special_tokens=True)

    # Keep only the first line produced after the "Inspirational Quote:" marker;
    # fall back to the whole output if the marker is missing.
    if "Inspirational Quote:" in generated_text:
        quote = generated_text.split("Inspirational Quote:")[1].strip().split("\n")[0]
    else:
        quote = generated_text.strip()

    return quote
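# Quick sanity check (hypothetical example; uncomment to try it outside the app):
# print(generate_quote("Today was stressful but I got through it.", "fear"))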


# Journal entries are persisted to a CSV file in the working directory.
csv_file = os.path.join(os.getcwd(), 'diary_entries.csv')
if not os.path.exists(csv_file):
    df = pd.DataFrame(columns=["Date", "Diary Text", "Emotion", "Quote"])
    df.to_csv(csv_file, index=False)
else:
    df = pd.read_csv(csv_file)


def journal_interface(Diary):
    global df
    try:
        # 1) Detect the emotion expressed in the diary entry.
        emotion = get_emotion(Diary)

        # 2) Generate an inspirational quote conditioned on the entry and its emotion.
        quote = generate_quote(Diary, emotion)

        # 3) Append the entry to the in-memory log and persist it to the CSV file.
        date_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        new_entry = pd.DataFrame(
            [[date_time, Diary, emotion, quote]],
            columns=["Date", "Diary Text", "Emotion", "Quote"],
        )
        df = pd.concat([df, new_entry], ignore_index=True)
        df.to_csv(csv_file, index=False)

        return emotion, quote
    except Exception as e:
        print(f"Error encountered: {e}")
        return f"Error: {e}", ""


# Gradio UI: one free-text input, two text outputs (emotion and quote).
interface = gr.Interface(
    fn=journal_interface,
    inputs=gr.Textbox(lines=5, placeholder="Enter your thoughts here..."),
    outputs=[
        gr.Textbox(label="Detected Emotion"),
        gr.Textbox(label="Generated Quote"),
    ],
    title="AI-Powered Personal Journal",
    description="Enter your thoughts, and the AI will detect the emotion and generate an inspirational quote based on it.",
    theme=gr.themes.Soft(),
)

# Launch the app; share=True also creates a temporary public link.
interface.launch(share=True)
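# Usage sketch (assumptions beyond the code above: the script is saved as app.py,
# the gated google/gemma-2-2b-it license has been accepted on the Hub, and the
# token is exported under the same 'hftoken' name read by os.getenv above):
#
#   pip install torch transformers gradio pandas
#   export hftoken=<your Hugging Face access token>
#   python app.py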