akhaliq committed
Commit 5aeaad2
1 Parent(s): 48a8102
Files changed (2)
  1. app.py +1 -1
  2. app_allenai.py +16 -9
app.py CHANGED
@@ -35,7 +35,7 @@ with gr.Blocks(fill_height=True) as demo:
         demo_qwen.render()
     with gr.Tab("Experimental"):
         demo_experimental.render()
-    with gr.Tab("AllenAI/Llama-3.1-Tulu-3-8B"):
+    with gr.Tab("AllenAI"):
         demo_allenai.render()
     with gr.Tab("Meta Llama"):
         demo_sambanova.render()
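
Note: the hunk above only renames the tab label; the sub-app is still embedded the same way. A minimal sketch of that pattern, assuming app_allenai.py exposes its gr.Blocks object as demo (import path chosen here for illustration):

import gradio as gr

from app_allenai import demo as demo_allenai  # assumed export, used only for illustration

with gr.Blocks(fill_height=True) as demo:
    with gr.Tab("AllenAI"):
        demo_allenai.render()  # embeds the already-built Blocks app inside this tab

if __name__ == "__main__":
    demo.launch()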
app_allenai.py CHANGED
@@ -1,21 +1,28 @@
 import gradio as gr
 import spaces
 import transformers_gradio
-from utils import get_app

-# Load Llama model
+# Load models
 llama_demo = gr.load(name="allenai/Llama-3.1-Tulu-3-8B", src=transformers_gradio.registry)
 llama_demo.fn = spaces.GPU()(llama_demo.fn)

-# Load OLMo model
 olmo_demo = gr.load(name="akhaliq/olmo-anychat", src="spaces")

-# Create combined demo with dropdown
-demo = get_app(
-    models=["allenai/Llama-3.1-Tulu-3-8B", "akhaliq/olmo-anychat"],
-    default_model="allenai/Llama-3.1-Tulu-3-8B",
-    src=lambda name, _: llama_demo if name == "allenai/Llama-3.1-Tulu-3-8B" else olmo_demo
-)
+# Create the interface
+with gr.Blocks() as demo:
+    model_dropdown = gr.Dropdown(
+        choices=["allenai/Llama-3.1-Tulu-3-8B", "akhaliq/olmo-anychat"],
+        value="allenai/Llama-3.1-Tulu-3-8B",
+        label="Select Model"
+    )
+
+    def chat(message, model_name):
+        if model_name == "allenai/Llama-3.1-Tulu-3-8B":
+            return llama_demo.fn(message)
+        else:
+            return olmo_demo.fn(message)
+
+    chatinterface = gr.ChatInterface(chat, additional_inputs=[model_dropdown])

 # Disable API names
 for fn in demo.fns.values():
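
Usage note: gr.ChatInterface invokes its callback with the message, the running chat history, and then one value per component in additional_inputs, which is why the sketch below includes a history parameter. The loop body for disabling API names is cut off by the hunk above; setting api_name = False is the usual pattern and is only an assumption here. The chat function bodies are placeholders standing in for llama_demo.fn / olmo_demo.fn.

import gradio as gr

def chat(message, history, model_name):
    # ChatInterface passes (message, history, *additional_inputs) to its fn;
    # route the prompt to whichever backend the dropdown selected.
    if model_name == "allenai/Llama-3.1-Tulu-3-8B":
        return f"[tulu placeholder] {message}"
    return f"[olmo placeholder] {message}"

model_dropdown = gr.Dropdown(
    choices=["allenai/Llama-3.1-Tulu-3-8B", "akhaliq/olmo-anychat"],
    value="allenai/Llama-3.1-Tulu-3-8B",
    label="Select Model",
)

demo = gr.ChatInterface(chat, additional_inputs=[model_dropdown])

# Assumed loop body: the hunk truncates after "for fn in demo.fns.values():".
for fn in demo.fns.values():
    fn.api_name = False

if __name__ == "__main__":
    demo.launch()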