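# BookMindAI: a Gradio app that summarizes and discusses books with Gemini Pro
# and publishes the generated summaries to Telegraph.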
import gradio as gr
import json
import markdown
import requests
from telegraph import Telegraph
from gradio_client import Client
import time
# Set up the Telegraph client
telegraph = Telegraph()
telegraph.create_account(short_name='BookMindAI')
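
# detail_queries.json maps each optional detail section to a prompt template;
# lang.json's keys are the language labels offered in the dropdown.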
with open('detail_queries.json', 'r') as file:
    detail_queries = json.load(file)
with open('lang.json', 'r') as file:
    languages = [str(x) for x in json.load(file).keys()]
def markdown_to_html(md_content):
    return markdown.markdown(md_content)
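
# Send a prompt (and optional images) to the hosted Gemini Space through gradio_client.
# The positional arguments are the values its /bot endpoint expects; the reply text is
# pulled out of the returned chat history.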
def predict(prompt, images=None):
    client = Client("https://roboflow-gemini.hf.space/--replicas/bkd57/")
    result = client.predict(
        None,
        images or [],
        0.4,
        2048,
        "",
        32,
        1,
        [[prompt, None]],
        api_name="/bot"
    )
    return result[0][1]
def fetch_summary(book_name, author, language):
    question = f"Provide a short summary of the book '{book_name}' by {author} in {language} language."
    answer = predict(question)
    return answer
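
# Convert the Markdown report to HTML and publish it as a Telegraph page, returning its public URL.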
def post_to_telegraph(title, content):
    html_content = markdown_to_html(content)
    response = telegraph.create_page(
        title=title,
        html_content=html_content
    )
    return 'https://telegra.ph/{}'.format(response['path'])
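
# Build the full report: ask Gemini for each requested detail (retrying once after a short
# pause on failure), prepend the book summary, then publish the combined Markdown to Telegraph.
# Dropdown values look like "🇬🇧 english", so language_choice[3:] strips the flag emoji and space.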
def generate_predictions(book_name, author, language_choice, detail_options=[]):
    details = ""
    for option in detail_options:
        query_template = detail_queries[option].format(book_name=book_name, author=author) + '. Answer in ' + language_choice[3:]
        try:
            response = predict(query_template)
            details += f"\n\n**{option}**:\n{response}"
        except Exception:
            time.sleep(2)
            try:
                response = predict(query_template)
                details += f"\n\n**{option}**:\n{response}"
            except Exception:
                pass
    summary = fetch_summary(book_name, author, language_choice[3:])
    combined_summary = summary + details
    try:
        telegraph_url = post_to_telegraph(f"Summary of {book_name} by {author}", combined_summary)
    except requests.exceptions.ConnectionError:
        telegraph_url = "Error connecting to Telegraph API"
    return combined_summary, telegraph_url
with gr.Blocks(title="📚 BookMindAI", theme=gr.themes.Base()).queue() as demo:
    gr.DuplicateButton()
    with gr.Tab("Summarize book🎯"):
        with gr.Row():
            with gr.Column():
                book_name_input = gr.Textbox(placeholder="Enter Book Name", label="Book Name")
                author_name_input = gr.Textbox(placeholder="Enter Author Name", label="Author Name")
                language_input = gr.Dropdown(choices=languages, label="Language")
                detail_options_input = gr.CheckboxGroup(choices=list(detail_queries.keys()), label="Details to Include", visible=True)
                run_button_summarize = gr.Button("Run", visible=True)
            with gr.Column():
                telegraph_link_output = gr.Markdown(label="View on Telegraph", visible=True)
        with gr.Row():
            summary_output = gr.Markdown(label="Parsed Content", visible=True)
        run_button_summarize.click(fn=generate_predictions,
                                   inputs=[book_name_input, author_name_input, language_input, detail_options_input],
                                   outputs=[summary_output, telegraph_link_output],
                                   show_progress=True, queue=True)
        examples_summarize = [
            ["Harry Potter and the Philosopher's Stone", "J.K. Rowling", "🇬🇧 english", []],
            ["Pride and Prejudice", "Jane Austen", "🇺🇦 ukrainian", []],
            ["The Great Gatsby", "F. Scott Fitzgerald", "🇫🇷 french", []]
        ]
        gr.Examples(examples=examples_summarize, inputs=[book_name_input, author_name_input, language_input, detail_options_input])
    with gr.Tab("Talk about book📖"):
        chat_examples = [
            "How do the underlying themes of a book reflect the societal values and beliefs of its time?",
            "In what ways do the characters' personal journeys mirror the broader human experience?"
        ]
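        # Forward each chat message to Gemini and yield the reply
        # (gr.ChatInterface accepts a generator, so the answer is streamed back in one chunk).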
        def chat_response(message, history):
            response = predict(message)
            yield response
        chat_interface = gr.ChatInterface(chat_response, examples=chat_examples, title='Talk with Gemini PRO about any book.')
demo.launch()