# Hugging Face Space: miittnnss — app.py (commit 53f445e)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
# Initialize the DialoGPT model and tokenizer
# Downloads (or loads from cache) the DialoGPT-medium causal LM weights.
model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
# NOTE(review): the tokenizer is loaded from the "gpt2" checkpoint rather than
# "microsoft/DialoGPT-medium". DialoGPT is GPT-2-based so the vocabularies are
# compatible, and the UI description advertises "GPT-2 tokenization", but
# loading the tokenizer from the model's own checkpoint would guarantee
# matching special tokens — confirm the mismatch is intentional.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
# Holds the most recent bot reply (assigned in chat()); it is stored for
# reference only and is NOT fed back into generation as conversation context.
chat_history = None
def chat(message):
    """Generate a single-turn reply to *message* using DialoGPT-medium.

    Args:
        message: The user's input text.

    Returns:
        The bot's reply as a string, with the echoed input prompt stripped.
    """
    global chat_history
    # Encode the user's message, terminated by EOS as DialoGPT expects.
    input_ids = tokenizer.encode(message + tokenizer.eos_token, return_tensors="pt")
    # Generate a continuation; pad with EOS because GPT-2-family models
    # define no dedicated pad token.
    response_ids = model.generate(
        input_ids,
        max_length=150,
        pad_token_id=tokenizer.eos_token_id,
        num_return_sequences=1,
    )
    # BUG FIX: generate() on a causal LM returns prompt + continuation.
    # Decoding response_ids[0] wholesale echoed the user's message back at
    # the start of every reply; decode only the newly generated tokens.
    bot_response = tokenizer.decode(
        response_ids[0, input_ids.shape[-1]:], skip_special_tokens=True
    )
    chat_history = bot_response  # stored for reference only, not used as context
    return bot_response
# Assemble and launch the Gradio front-end for the chat() function.
example_prompts = [
    "Howdy!",
    "Tell me a joke.",
    "Explain quantum computing in simple terms.",
    "How are you?",
    "What is an exponent in mathematics?",
    "Does money buy happiness?",
]

iface = gr.Interface(
    fn=chat,
    inputs="text",
    outputs="text",
    title="UrFriendly Chatbot",
    description="UrFriendly Chatbot is a conversational assistant based on DialoGPT-medium with GPT-2 tokenization. Type or click on one of the examples to get started. Please note that UrFriendly Chatbot is not 100% accurate, so incorrect information may generate. 💬🤗",
    examples=example_prompts,
    # NOTE(review): live=True re-runs chat() on every input change, which
    # triggers a full model generation per keystroke — confirm this cost is
    # intended rather than a submit-button flow (live=False).
    live=True,
)

iface.launch()