ZeroCool94 committed
Commit 431e656
1 Parent(s): 0021eb2

Update app.py

Files changed (1)
  1. app.py +4 -13
app.py CHANGED
@@ -3,7 +3,7 @@ import gradio as gr
  import torch
  from PIL import Image
 
- model_id = 'andite/anything-v4.0'
+ model_id = 'Sygil/Sygil-Diffusion'
  prefix = ''
 
  scheduler = DPMSolverMultistepScheduler.from_pretrained(model_id, subfolder="scheduler")
@@ -79,14 +79,12 @@ with gr.Blocks(css=css) as demo:
  <h1>Anything V4.0</h1>
  </div>
  <p>
- Demo for <a href="https://huggingface.co/andite/anything-v4.0">Anything V4.0</a> Stable Diffusion model.<br>
+ Demo for the <a href="https://huggingface.co/Sygil/Sygil-Diffusion">Sygil Diffusion v0.1</a> model.<br>
  {"Add the following tokens to your prompts for the model to work properly: <b>prefix</b>" if prefix else ""}
  </p>
- Running on {"<b>GPU 🔥</b>" if torch.cuda.is_available() else f"<b>CPU 🥶</b>. For faster inference it is recommended to <b>upgrade to GPU in <a href='https://huggingface.co/spaces/akhaliq/anything-v4.0/settings'>Settings</a></b>"} after duplicating the space<br><br>
- <a style="display:inline-block" href="https://huggingface.co/spaces/akhaliq/anything-v4.0?duplicate=true"><img src="https://bit.ly/3gLdBN6" alt="Duplicate Space"></a>
  </div>
  """
- )
+ )
  with gr.Row():
 
  with gr.Column(scale=55):
@@ -126,12 +124,5 @@ with gr.Blocks(css=css) as demo:
  prompt.submit(inference, inputs=inputs, outputs=outputs)
  generate.click(inference, inputs=inputs, outputs=outputs)
 
- gr.HTML("""
- <div style="border-top: 1px solid #303030;">
- <br>
- <p>This space was created using <a href="https://huggingface.co/spaces/anzorq/sd-space-creator">SD Space Creator</a>.</p>
- </div>
- """)
-
- demo.queue(concurrency_count=1)
+ demo.queue(concurrency_count=2)
  demo.launch()
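
For context, a minimal sketch of how the changed lines fit together in the updated app.py. Only model_id, the demo description, and the queue call are part of this commit; the pipeline construction and UI wiring below are assumptions about the surrounding code (e.g. the StableDiffusionPipeline class is not shown in the diff), not what the Space necessarily ships.

import gradio as gr
import torch
from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler

# The commit points the Space at the Sygil checkpoint instead of Anything V4.0.
model_id = 'Sygil/Sygil-Diffusion'
prefix = ''

# As in the diff: the scheduler is loaded from the model repo's "scheduler" subfolder.
scheduler = DPMSolverMultistepScheduler.from_pretrained(model_id, subfolder="scheduler")

# Assumption: the rest of app.py builds the pipeline roughly like this (outside the diff).
pipe = StableDiffusionPipeline.from_pretrained(
    model_id,
    scheduler=scheduler,
    torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
)

with gr.Blocks() as demo:
    ...  # UI layout plus the prompt.submit(...) and generate.click(...) wiring shown in the diff

# The commit drops the SD Space Creator footer and raises queue concurrency from 1 to 2.
demo.queue(concurrency_count=2)
demo.launch()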