import streamlit as st
from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_ID = "Adityyaa/Mistral-7b_finetuned_mental_health"

# Load the fine-tuned model and tokenizer once and cache them so the
# 7B weights are not reloaded on every Streamlit rerun.
@st.cache_resource
def load_model_and_tokenizer():
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    return model, tokenizer

model, tokenizer = load_model_and_tokenizer()
# Define the Streamlit app
def main():
    st.title("Mental Health Chatbot")
    st.write("Enter your message below and the chatbot will respond.")
    user_input = st.text_input("You:", "")
    if st.button("Send"):
        if user_input:
            # Generate a response from the chatbot
            inputs = tokenizer(user_input, return_tensors="pt")
            output_ids = model.generate(
                **inputs,
                max_new_tokens=50,
                num_return_sequences=1,
                pad_token_id=tokenizer.eos_token_id,
            )
            # Decode only the newly generated tokens, not the echoed prompt
            new_tokens = output_ids[0][inputs["input_ids"].shape[-1]:]
            response_text = tokenizer.decode(new_tokens, skip_special_tokens=True)
            st.text_area("Chatbot:", response_text)
        else:
            st.warning("Please enter a message.")

if __name__ == "__main__":
    main()
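
# Usage note: assuming Streamlit and Transformers are installed in the
# environment, the app can be launched locally from the repository root with:
#   streamlit run app.py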