Texfile_Q_And_A / app.py
mayankchugh-learning's picture
Update app.py
98b2f71 verified
raw
history blame
1.52 kB
# Use a pipeline as a high-level helper
from transformers import pipeline
import torch
import gradio as gr
# Extractive question-answering pipeline; downloads the model from the
# Hugging Face Hub on first run and caches it locally.
question_answering = pipeline("question-answering", model="deepset/roberta-base-squad2")
# Alternative: load the same model from a local snapshot instead of the Hub.
# model_path = "../Models/models--deepset--roberta-base-squad2/snapshots/cbf50ba81465d4d8676b8bab348e31835147541b"
# question_answering = pipeline("question-answering", model=model_path)
def read_file(file_obj):
    """Return the text contents of an uploaded file.

    Args:
        file_obj (file): Object exposing a ``.name`` path attribute
            (e.g. a Gradio ``File`` upload).

    Returns:
        str: The file's text on success, or an error message string
        when the file could not be opened or decoded.
    """
    try:
        with open(file_obj.name, 'r', encoding='utf-8') as handle:
            return handle.read()
    except Exception as e:
        return f"Error: Unable to read the file. {e}"
def get_answer(file_obj, question):
    """Answer *question* using the uploaded file's text as context.

    Args:
        file_obj (file): Uploaded file object exposing a ``.name`` path
            attribute (e.g. a Gradio ``File`` upload).
        question (str): The question to ask about the file's contents.

    Returns:
        str: The answer span extracted by the QA model, or the read-error
        message when the file could not be loaded.
    """
    context = read_file(file_obj=file_obj)
    # Bug fix: if reading failed, read_file returns an "Error: ..." string.
    # Don't feed that message to the model as if it were document context —
    # surface it to the user instead.
    if context.startswith("Error: Unable to read the file."):
        return context
    answer = question_answering(question=question, context=context)
    return answer["answer"]
# Shut down any previously running Gradio instances before launching anew.
gr.close_all()

# Build the UI components up front so the Interface call stays readable.
file_input = gr.File(label="Upload your file")
question_input = gr.Textbox(label="Input your question here...", lines=2)
answer_output = gr.Textbox(label="Answer Text", lines=2)

demo = gr.Interface(
    fn=get_answer,
    inputs=[file_input, question_input],
    outputs=[answer_output],
    title="@IT AI Enthusiast (https://www.youtube.com/@itaienthusiast/) - Project 5: DocuQ&A",
    description="This application will be used to Ask Questions Based On The Context Given To It",
    concurrency_limit=16,
)
demo.launch()