# MindMap / app.py
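"""Gradio app that turns an uploaded research paper PDF into a placeholder summary,
a Markdown mindmap generated by a local LLM, and a graphical (SVG) mindmap."""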
import os
import subprocess
import sys

import gradio as gr

from generate_markdown import load_llm_model, generate_markdown
from generate_mindmap import generate_mindmap_svg

# Load the LLM once at import time so the Gradio handler can reuse it.
llm = load_llm_model()
def generate(file):
    # The summary is currently a hardcoded placeholder; only the mindmap is derived from the paper.
    summary = "This is a summary of the research paper"
    mindmap_markdown = generate_markdown(llm, file)
    mindmap_svg = generate_mindmap_svg(mindmap_markdown)
    return summary, mindmap_markdown, mindmap_svg
theme = gr.themes.Soft(
    primary_hue="purple",
    secondary_hue="cyan",
    neutral_hue="slate",
    font=[gr.themes.GoogleFont('Syne'), gr.themes.GoogleFont('Poppins')],
)
with gr.Blocks(theme=theme, title="Binary Biology") as app:
    file = gr.File(file_count='single', label='Upload Research Paper PDF file')
    summary = gr.TextArea(label='Summary', lines=5, interactive=False, show_copy_button=True)
    markdown_mindmap = gr.Textbox(label='Mindmap', lines=5, interactive=False, show_copy_button=True)
    graphical_mindmap = gr.Image(label='Graphical Mindmap', interactive=False, show_download_button=True)
    submit = gr.Button(value='Submit')
    submit.click(
        generate,
        inputs=[file],
        outputs=[summary, markdown_mindmap, graphical_mindmap],
        scroll_to_output=True,
        show_progress=True,
        queue=True,
    )
if __name__ == "__main__":
    # Build llama-cpp-python with OpenBLAS, i.e. the equivalent of:
    #   CMAKE_ARGS="-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS" pip install llama-cpp-python
    try:
        env = os.environ.copy()
        env["CMAKE_ARGS"] = "-DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS"
        subprocess.run(["pip", "install", "llama-cpp-python"], env=env, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Fall back to a plain install without the OpenBLAS build flags.
        subprocess.run(["pip", "install", "llama-cpp-python"])
    # Graphviz is required to render the SVG mindmap.
    try:
        try:
            subprocess.run(['apt', 'install', '-y', 'graphviz'], check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            subprocess.run(['sudo', 'apt', 'install', '-y', 'graphviz'], check=True)
        print("Graphviz installed successfully")
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("Graphviz installation failed")
        sys.exit(1)
    llm = load_llm_model()
    print("Model loaded successfully")
    app.queue(default_concurrency_limit=5).launch(show_error=True)
# summary, markdown_mindmap, graphical_mindmap = generate("cr1c00107.pdf")
# print(summary)
# print("\n\n")
# print(markdown_mindmap)
# print("\n\n")
# print(graphical_mindmap)