AlekseyCalvin committed
Commit dd188e5
1 Parent(s): fc923ab

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -112,7 +112,7 @@ def update_selection(evt: gr.SelectData, width, height):
     )
 
 @spaces.GPU(duration=70)
-def generate_image(prompt, negative_prompt, trigger_word, steps, seed, cfg_scale, width, height, lora_scale):
+def generate_image(prompt, negative_prompt, lora_scale, steps, seed, cfg_scale, width, height):
     pipe.to("cuda")
     generator = torch.Generator(device="cuda").manual_seed(seed)
 
@@ -130,7 +130,7 @@ def generate_image(prompt, negative_prompt, trigger_word, steps, seed, cfg_scale
     ).images[0]
     return image
 
-def run_lora(prompt, negative_prompt, cfg_scale, steps, selected_index, seed, width, height, lora_scale):
+def run_lora(prompt, negative_prompt, lora_scale, cfg_scale, steps, selected_index, seed, width, height):
     if negative_prompt == "":
         negative_prompt = None
     if selected_index is None:
@@ -167,7 +167,7 @@ def run_lora(prompt, negative_prompt, cfg_scale, steps, selected_index, seed, wi
     # pipe.load_lora_weights(lora_path, adapter_name=selected_lora["repo"], lora_scale=[1.0])
     # pipe.set_adapters(["fast", selected_lora["repo"]], adapter_weights=[1.0, 1.0])
 
-    image = generate_image(prompt, negative_prompt, steps, seed, cfg_scale, width, height, lora_scale)
+    image = generate_image(prompt, lora_scale, negative_prompt, steps, seed, cfg_scale, width, height)
     pipe.to("cpu")
     pipe.unload_lora_weights()
     return image, seed
@@ -246,7 +246,7 @@ with gr.Blocks(theme=gr.themes.Soft(), css=css) as app:
     gr.on(
         triggers=[generate_button.click, prompt.submit],
         fn=run_lora,
-        inputs=[prompt, negative_prompt, cfg_scale, steps, selected_index, seed, width, height, lora_scale],
+        inputs=[prompt, negative_prompt, lora_scale, cfg_scale, steps, selected_index, seed, width, height],
         outputs=[result, seed]
     )
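
For context on how the reordered inputs list is consumed: gr.on passes each component in inputs positionally to fn, so the list must mirror run_lora's new parameter order exactly. Below is a minimal, self-contained sketch of that wiring under that assumption; the component labels, ranges, and default values are illustrative placeholders, not taken from app.py, and the run_lora body is stubbed out.

# Sketch only: shows the positional mapping between gr.on(inputs=...) and run_lora.
import gradio as gr

def run_lora(prompt, negative_prompt, lora_scale, cfg_scale, steps,
             selected_index, seed, width, height):
    # Placeholder body; the real app loads a LoRA and calls generate_image here.
    return f"{prompt} | lora_scale={lora_scale} | {width}x{height}", seed

with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    negative_prompt = gr.Textbox(label="Negative prompt")
    lora_scale = gr.Slider(0.0, 2.0, value=0.9, label="LoRA scale")
    cfg_scale = gr.Slider(1.0, 20.0, value=3.5, label="CFG scale")
    steps = gr.Slider(1, 50, value=28, step=1, label="Steps")
    selected_index = gr.State(value=0)
    seed = gr.Number(value=0, precision=0, label="Seed")
    width = gr.Slider(256, 1536, value=1024, step=64, label="Width")
    height = gr.Slider(256, 1536, value=1024, step=64, label="Height")
    generate_button = gr.Button("Generate")
    result = gr.Textbox(label="Result")

    gr.on(
        triggers=[generate_button.click, prompt.submit],
        fn=run_lora,
        # This order must match run_lora's signature, since values are passed positionally.
        inputs=[prompt, negative_prompt, lora_scale, cfg_scale, steps,
                selected_index, seed, width, height],
        outputs=[result, seed],
    )

if __name__ == "__main__":
    demo.launch()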