farmax committed
Commit
7f0656e
1 Parent(s): 93068c0

Update app.py

Files changed (1): app.py +26 -26
app.py CHANGED
@@ -96,37 +96,37 @@ def demo():
     qa_chain = gr.State()
     collection_name = gr.State()
 
-    gr.Markdown("# PDF-based Chatbot Creator")
+    gr.Markdown("# Creatore di Chatbot basato su PDF")
 
-    with gr.Tab("Step 1 - Upload PDFs"):
-        document = gr.Files(height=100, file_count="multiple", file_types=["pdf"], interactive=True, label="Upload your PDF documents")
+    with gr.Tab("Passo 1 - Carica PDF"):
+        document = gr.Files(height=100, file_count="multiple", file_types=["pdf"], interactive=True, label="Carica i tuoi documenti PDF")
 
-    with gr.Tab("Step 2 - Process Documents"):
-        db_btn = gr.Radio(["ChromaDB"], label="Vector database type", value="ChromaDB", type="index")
-        with gr.Accordion("Advanced Options - Document text splitter", open=False):
-            slider_chunk_size = gr.Slider(100, 1000, 1000, step=20, label="Chunk size")
-            slider_chunk_overlap = gr.Slider(10, 200, 100, step=10, label="Chunk overlap")
-        db_progress = gr.Textbox(label="Vector database initialization", value="None")
-        db_btn = gr.Button("Generate vector database")
+    with gr.Tab("Passo 2 - Elabora Documenti"):
+        db_btn = gr.Radio(["ChromaDB"], label="Tipo di database vettoriale", value="ChromaDB", type="index")
+        with gr.Accordion("Opzioni Avanzate - Divisione del testo del documento", open=False):
+            slider_chunk_size = gr.Slider(100, 1000, 1000, step=20, label="Dimensione del chunk")
+            slider_chunk_overlap = gr.Slider(10, 200, 100, step=10, label="Sovrapposizione del chunk")
+        db_progress = gr.Textbox(label="Inizializzazione del database vettoriale", value="Nessuna")
+        db_btn = gr.Button("Genera database vettoriale")
 
-    with gr.Tab("Step 3 - Initialize QA chain"):
-        llm_btn = gr.Radio(list_llm_simple, label="LLM models", value=list_llm_simple[5], type="index")
-        with gr.Accordion("Advanced options - LLM model", open=False):
-            slider_temperature = gr.Slider(0.01, 1.0, 0.3, step=0.1, label="Temperature")
-            slider_maxtokens = gr.Slider(224, 4096, 1024, step=32, label="Max Tokens")
-            slider_topk = gr.Slider(1, 10, 3, step=1, label="top-k samples")
-        language_btn = gr.Radio(["Italian", "English"], label="Language", value="Italian", type="index")
-        llm_progress = gr.Textbox(value="None", label="QA chain initialization")
-        qachain_btn = gr.Button("Initialize Question Answering chain")
+    with gr.Tab("Passo 3 - Inizializza catena QA"):
+        llm_btn = gr.Radio(list_llm_simple, label="Modelli LLM", value=list_llm_simple[5], type="index")
+        with gr.Accordion("Opzioni avanzate - Modello LLM", open=False):
+            slider_temperature = gr.Slider(0.01, 1.0, 0.3, step=0.1, label="Temperatura")
+            slider_maxtokens = gr.Slider(224, 4096, 1024, step=32, label="Token massimi")
+            slider_topk = gr.Slider(1, 10, 3, step=1, label="Campioni top-k")
+        language_btn = gr.Radio(["Italiano", "Inglese"], label="Lingua", value="Italiano", type="index")
+        llm_progress = gr.Textbox(value="Nessuna", label="Inizializzazione catena QA")
+        qachain_btn = gr.Button("Inizializza catena di Domanda e Risposta")
 
-    with gr.Tab("Step 4 - Chatbot"):
+    with gr.Tab("Passo 4 - Chatbot"):
         chatbot = gr.Chatbot(height=300)
-        with gr.Accordion("Advanced options - Document references", open=False):
-            doc_sources = [gr.Textbox(label=f"Reference {i+1}", lines=2, container=True, scale=20) for i in range(3)]
-            source_pages = [gr.Number(label="Page", scale=1) for _ in range(3)]
-        msg = gr.Textbox(placeholder="Enter message (e.g., 'What is this document about?')", container=True)
-        submit_btn = gr.Button("Send message")
-        clear_btn = gr.ClearButton([msg, chatbot], value="Clear conversation")
+        with gr.Accordion("Opzioni avanzate - Riferimenti ai documenti", open=False):
+            doc_sources = [gr.Textbox(label=f"Riferimento {i+1}", lines=2, container=True, scale=20) for i in range(3)]
+            source_pages = [gr.Number(label="Pagina", scale=1) for _ in range(3)]
+        msg = gr.Textbox(placeholder="Inserisci il messaggio (es. 'Di cosa tratta questo documento?')", container=True)
+        submit_btn = gr.Button("Invia messaggio")
+        clear_btn = gr.ClearButton([msg, chatbot], value="Cancella conversazione")
 
     db_btn.click(initialize_database, inputs=[document, slider_chunk_size, slider_chunk_overlap], outputs=[vector_db, collection_name, db_progress])
     qachain_btn.click(initialize_LLM, inputs=[llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db], outputs=[qa_chain, llm_progress])
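
For reference, a minimal sketch of the handler signatures implied by the .click() wiring above. initialize_database and initialize_LLM are defined elsewhere in app.py and are untouched by this commit; the parameter names and placeholder bodies below are assumptions that only illustrate the input/output arity Gradio expects, not the repository's actual implementation.

def initialize_database(list_file_obj, chunk_size, chunk_overlap):
    # Inputs come from [document, slider_chunk_size, slider_chunk_overlap];
    # outputs feed [vector_db, collection_name, db_progress].
    vector_db = None                  # placeholder for the ChromaDB vector store
    collection_name = "collection"    # placeholder collection name
    return vector_db, collection_name, "Initialization complete"  # placeholder status text

def initialize_LLM(llm_option, llm_temperature, max_tokens, top_k, vector_db):
    # Inputs come from [llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db];
    # outputs feed [qa_chain, llm_progress]. llm_option is an integer index because the Radio uses type="index".
    qa_chain = None                   # placeholder for the retrieval QA chain
    return qa_chain, "QA chain ready"  # placeholder status text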