Kohaku-Blueleaf committed on
Commit
25e6739
1 Parent(s): 788f5e1

remove redundant @spaces.GPU decorators

Browse files
Files changed (1) hide show
  1. app.py +0 -5
app.py CHANGED
@@ -134,7 +134,6 @@ k_sampler = KDiffusionSampler(
134
  )
135
 
136
 
137
- @spaces.GPU
138
  @torch.inference_mode()
139
  def encode_cropped_prompt_77tokens(txt: str):
140
  cond_ids = tokenizer(
@@ -148,7 +147,6 @@ def encode_cropped_prompt_77tokens(txt: str):
148
  return text_cond
149
 
150
 
151
- @spaces.GPU
152
  @torch.inference_mode()
153
  def encode_cropped_prompt(txt: str, max_length=225):
154
  cond_ids = tokenizer(
@@ -183,7 +181,6 @@ def encode_cropped_prompt(txt: str, max_length=225):
183
  return text_cond.flatten(0, 1).unsqueeze(0)
184
 
185
 
186
- @spaces.GPU
187
  @torch.inference_mode()
188
  def pytorch2numpy(imgs):
189
  results = []
@@ -195,7 +192,6 @@ def pytorch2numpy(imgs):
195
  return results
196
 
197
 
198
- @spaces.GPU
199
  @torch.inference_mode()
200
  def numpy2pytorch(imgs):
201
  h = torch.from_numpy(np.stack(imgs, axis=0)).float() / 127.5 - 1.0
@@ -203,7 +199,6 @@ def numpy2pytorch(imgs):
203
  return h
204
 
205
 
206
- @spaces.GPU
207
  @torch.inference_mode()
208
  def interrogator_process(x):
209
  img = Image.fromarray(x)
 
134
  )
135
 
136
 
 
137
  @torch.inference_mode()
138
  def encode_cropped_prompt_77tokens(txt: str):
139
  cond_ids = tokenizer(
 
147
  return text_cond
148
 
149
 
 
150
  @torch.inference_mode()
151
  def encode_cropped_prompt(txt: str, max_length=225):
152
  cond_ids = tokenizer(
 
181
  return text_cond.flatten(0, 1).unsqueeze(0)
182
 
183
 
 
184
  @torch.inference_mode()
185
  def pytorch2numpy(imgs):
186
  results = []
 
192
  return results
193
 
194
 
 
195
  @torch.inference_mode()
196
  def numpy2pytorch(imgs):
197
  h = torch.from_numpy(np.stack(imgs, axis=0)).float() / 127.5 - 1.0
 
199
  return h
200
 
201
 
 
202
  @torch.inference_mode()
203
  def interrogator_process(x):
204
  img = Image.fromarray(x)