multimodalart committed
Commit f8ac431 · 1 Parent(s): 9954463

Update app.py

Files changed (1)
  1. app.py +5 -4
app.py CHANGED
@@ -16,7 +16,7 @@ MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = 1024
 
 @spaces.GPU
-def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps):
+def infer(prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps, progress=gr.Progress(track_tqdm=True)):
 
     if randomize_seed:
         seed = random.randint(0, MAX_SEED)
@@ -55,8 +55,8 @@ with gr.Blocks(css=css) as demo:
 
     with gr.Column(elem_id="col-container"):
         gr.Markdown(f"""
-        # Text-to-Image Gradio Template
-        Currently running on {power_device}.
+        # AuraFlow demo
+        Open source 5.6B parameters MMDiT model
         """)
 
         with gr.Row():
@@ -134,7 +134,8 @@ with gr.Blocks(css=css) as demo:
             cache_examples=False
         )
 
-    run_button.click(
+    gr.on(
+        triggers=[run_button.click, prompt.submit, negative_prompt.submit],
         fn = infer,
         inputs = [prompt, negative_prompt, seed, randomize_seed, width, height, guidance_scale, num_inference_steps],
         outputs = [result]
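
For reference, the `progress=gr.Progress(track_tqdm=True)` parameter added to `infer` asks Gradio to surface any tqdm progress bar running inside the function (such as the diffusers denoising loop) in the web UI. Below is a minimal sketch of that pattern, not the Space's real app.py: the handler body and component names are placeholders standing in for the actual AuraFlow inference.

import time

import gradio as gr

# Minimal sketch: placeholder handler, only the progress wiring mirrors the commit.
def infer(prompt, progress=gr.Progress(track_tqdm=True)):
    # track_tqdm=True mirrors tqdm loops run inside this function in the UI;
    # progress.tqdm() does the same for an explicit loop like this one.
    for _ in progress.tqdm(range(20), desc="Denoising"):
        time.sleep(0.05)
    return f"Done: {prompt}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    run_button = gr.Button("Run")
    result = gr.Textbox(label="Result")
    run_button.click(fn=infer, inputs=[prompt], outputs=[result])

demo.launch()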
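
Similarly, the switch from `run_button.click(...)` to `gr.on(...)` binds one handler to several events, so pressing Enter in either textbox triggers generation as well as clicking the button. A minimal, self-contained sketch of that wiring (placeholder handler, not the real pipeline):

import gradio as gr

def infer(prompt, negative_prompt):
    # Placeholder handler; the Space runs the AuraFlow pipeline here.
    return f"prompt={prompt!r}, negative_prompt={negative_prompt!r}"

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    negative_prompt = gr.Textbox(label="Negative prompt")
    run_button = gr.Button("Run")
    result = gr.Textbox(label="Result")

    # One binding for three events: clicking Run or submitting either
    # textbox (Enter) all call the same handler.
    gr.on(
        triggers=[run_button.click, prompt.submit, negative_prompt.submit],
        fn=infer,
        inputs=[prompt, negative_prompt],
        outputs=[result],
    )

demo.launch()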