animrods committed on
Commit
1de1c90
·
verified ·
1 Parent(s): d6df9be

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -23,7 +23,7 @@ def read_content(file_path: str) -> str:
23
 
24
  return content
25
 
26
- def predict(image, prompt="high quality, best quality", negative_prompt="", guidance_scale=5, steps=30, ip_adapter_scale = 1.0, width=1024, height=1024, seed=0, center_crop=False):
27
 
28
  pipe.set_ip_adapter_scale(ip_adapter_scale)
29
 
@@ -31,8 +31,8 @@ def predict(image, prompt="high quality, best quality", negative_prompt="", guid
31
  negative_prompt = None
32
 
33
  init_image = image.convert("RGB")
34
- if center_crop is False:
35
- init_image = init_image.resize((224, 224))
36
 
37
  generator = torch.Generator(device="cpu").manual_seed(int(seed))
38
 
@@ -129,8 +129,8 @@ with image_blocks as demo:
129
 
130
 
131
 
132
- btn.click(fn=predict, inputs=[image, prompt, negative_prompt, guidance_scale, steps, ip_adapter_scale, width, height, seed, center_crop], outputs=[image_out], api_name='run')
133
- prompt.submit(fn=predict, inputs=[image, prompt, negative_prompt, guidance_scale, steps, ip_adapter_scale, width, height, seed, center_crop], outputs=[image_out])
134
 
135
  # gr.Examples(
136
  # examples=[
 
23
 
24
  return content
25
 
26
+ def predict(image, prompt="high quality, best quality", negative_prompt="", guidance_scale=5, steps=30, ip_adapter_scale = 1.0, width=1024, height=1024, seed=0):
27
 
28
  pipe.set_ip_adapter_scale(ip_adapter_scale)
29
 
 
31
  negative_prompt = None
32
 
33
  init_image = image.convert("RGB")
34
+ # if center_crop is False:
35
+ init_image = init_image.resize((224, 224))
36
 
37
  generator = torch.Generator(device="cpu").manual_seed(int(seed))
38
 
 
129
 
130
 
131
 
132
+ btn.click(fn=predict, inputs=[image, prompt, negative_prompt, guidance_scale, steps, ip_adapter_scale, width, height, seed], outputs=[image_out], api_name='run')
133
+ prompt.submit(fn=predict, inputs=[image, prompt, negative_prompt, guidance_scale, steps, ip_adapter_scale, width, height, seed], outputs=[image_out])
134
 
135
  # gr.Examples(
136
  # examples=[