For hf chat and gradio api
app.py
@@ -105,7 +105,6 @@ with gr.Blocks() as demo:
         inputs=[prompt, seed, width, height],
         outputs=[result, seed, latency],
         show_progress="hidden",
-        api_name="Enhance",
         queue=False,
         concurrency_limit=None
     )
@@ -116,8 +115,7 @@ with gr.Blocks() as demo:
         outputs=[result, seed, latency],
         show_progress="full",
         api_name="RealtimeFlux",
-        queue=False,
-        concurrency_limit=None
+        queue=False
     )

     def update_ui(realtime_enabled):
@@ -143,7 +141,6 @@ with gr.Blocks() as demo:
         inputs=[prompt, seed, width, height, randomize_seed, num_inference_steps],
         outputs=[result, seed, latency],
         show_progress="full",
-        api_name=False,
         queue=False,
         concurrency_limit=None
     )
@@ -154,7 +151,6 @@ with gr.Blocks() as demo:
         inputs=[realtime, prompt, seed, width, height, randomize_seed, num_inference_steps],
         outputs=[result, seed, latency],
         show_progress="hidden",
-        api_name=False,
         trigger_mode="always_last",
         queue=False,
         concurrency_limit=None