# Install dependencies (notebook cell); transformers is only needed if you run the model locally
# instead of calling the hosted Inference API.
!pip install transformers
!pip install requests
!pip install gradio

import os
import time
import requests
import gradio as gr
#from dotenv import load_dotenv

# Load the Hugging Face API key from the environment (optional; the public Inference API
# also accepts unauthenticated requests, with tighter rate limits).
#load_dotenv(override=True)
#if not os.getenv("HF_API_KEY"):
#    raise ValueError("HF_API_KEY must be set")
#hf_key = os.getenv("HF_API_KEY")

API_URL = "https://api-inference.huggingface.co/models/t4ai/distilbert-finetuned-t3-qa"
#headers = {"Authorization": "Bearer " + hf_key}
headers = {}

def query_model(payload):
    """Send a question/context payload to the hosted model and return the parsed JSON."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

# Construct the UI using Gradio: a document panel on the left, a chat panel on the right.
with gr.Blocks() as demo:
    with gr.Row():
        with gr.Column(scale=1):
            context = gr.Textbox(label="Document Text", lines=25)
        with gr.Column(scale=2):
            chatbot = gr.Chatbot(
                label="T3Soft Bot",
                value=[
                    (None, "Welcome! I am your QA assistant."),
                    (None, "Please paste your document content in the panel to the left."),
                    (None, "Then submit questions below!"),
                ],
            )
            msg = gr.Textbox(label="Ask your question")
            clear = gr.ClearButton([msg, chatbot])

    def respond(message, context, chat_history):
        if not context.strip():
            bot_message = "Hm, I don't see any document text, please paste in the box on the left."
        else:
            query_bot = query_model({"inputs": {"question": message, "context": context}})
            # A successful QA response contains an "answer" key; error responses
            # (e.g. model still loading, rate limits) return an "error" key instead.
            if isinstance(query_bot, dict) and "answer" in query_bot:
                bot_message = query_bot["answer"]
            else:
                bot_message = "I'm having trouble with this question, please try again."
        chat_history.append((message, bot_message))
        time.sleep(2)  # brief pause so the reply doesn't appear instantly
        return "", context, chat_history

    msg.submit(respond, [msg, context, chatbot], [msg, context, chatbot])

demo.launch()
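
The hosted Inference API can be slow to respond while the model is cold-starting, so the following is a minimal sketch of a local fallback, assuming the t4ai/distilbert-finetuned-t3-qa checkpoint can be loaded with the transformers question-answering pipeline. The helper name query_model_local is hypothetical and not part of the app above; it mirrors the {"answer": ...} shape that respond() expects, so it could be swapped in for query_model.

from transformers import pipeline

# Sketch only: loads the checkpoint locally instead of calling the hosted API.
qa_pipeline = pipeline("question-answering", model="t4ai/distilbert-finetuned-t3-qa")

def query_model_local(payload):
    """Answer a question from the same payload format used by query_model."""
    inputs = payload["inputs"]
    # The pipeline returns a dict like {"answer": ..., "score": ..., "start": ..., "end": ...}
    return qa_pipeline(question=inputs["question"], context=inputs["context"])

# Example usage:
# query_model_local({"inputs": {"question": "Who is the author?", "context": document_text}})["answer"]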