Spaces:
Running
Running
acecalisto3
committed on
Commit
•
e9b070c
1
Parent(s):
c20a17a
Update app.py
Browse files
app.py
CHANGED
@@ -245,7 +245,16 @@ def main():
|
|
245 |
outputs=model_description,
|
246 |
)
|
247 |
|
248 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
249 |
|
250 |
# --- Chat Interface ---
|
251 |
with gr.Tab("Chat"):
|
@@ -262,6 +271,8 @@ def main():
|
|
262 |
history = gr.State([])
|
263 |
|
264 |
def run_chat(purpose: str, message: str, agent_name: str, sys_prompt: str, temperature: float, max_new_tokens: int, top_p: float, repetition_penalty: float, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
|
|
|
|
|
265 |
response = generate_response(message, history, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty)
|
266 |
history.append((message, response))
|
267 |
return history, history
|
@@ -288,5 +299,7 @@ def main():
|
|
288 |
run_command_button.click(execute_command, inputs=command_input, outputs=command_output)
|
289 |
preview_button.click(preview_project, outputs=project_output)
|
290 |
|
|
|
|
|
291 |
if __name__ == "__main__":
|
292 |
main()
|
|
|
245 |
outputs=model_description,
|
246 |
)
|
247 |
|
248 |
+
# --- Event handler to load the selected model ---
|
249 |
+
def load_selected_model(model_name):
|
250 |
+
global current_model
|
251 |
+
load_output = load_hf_model(model_name)
|
252 |
+
if current_model:
|
253 |
+
return f"Model '{model_name}' loaded successfully!"
|
254 |
+
else:
|
255 |
+
return f"Error loading model '{model_name}'"
|
256 |
+
|
257 |
+
load_button.click(load_selected_model, inputs=model_name, outputs=load_output)
|
258 |
|
259 |
# --- Chat Interface ---
|
260 |
with gr.Tab("Chat"):
|
|
|
271 |
history = gr.State([])
|
272 |
|
273 |
def run_chat(purpose: str, message: str, agent_name: str, sys_prompt: str, temperature: float, max_new_tokens: int, top_p: float, repetition_penalty: float, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
|
274 |
+
if not current_model:
|
275 |
+
return [(history, history), "Please load a model first."]
|
276 |
response = generate_response(message, history, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty)
|
277 |
history.append((message, response))
|
278 |
return history, history
|
|
|
299 |
run_command_button.click(execute_command, inputs=command_input, outputs=command_output)
|
300 |
preview_button.click(preview_project, outputs=project_output)
|
301 |
|
302 |
+
demo.launch()
|
303 |
+
|
304 |
if __name__ == "__main__":
|
305 |
main()
|