AlekseyCalvin committed
Commit 6a252ce
1 Parent(s): f152e69

Update pipeline.py

Files changed (1)
  1. pipeline.py +26 -4
pipeline.py CHANGED

@@ -169,7 +169,7 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
         prompt = [prompt] if isinstance(prompt, str) else prompt
         batch_size = len(prompt)
 
-        text_inputs = tokenizer(
+        text_inputs = self.tokenizer(
             prompt,
             padding="max_length",
             max_length=self.tokenizer_max_length,
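
The first hunk repairs what would be a NameError at call time: the bare name `tokenizer` is undefined inside the encoding method, while the CLIP tokenizer lives on the pipeline instance as `self.tokenizer`. A minimal sketch of the repaired call, assuming a stock CLIPTokenizer and the usual 77-token limit behind `self.tokenizer_max_length` (the checkpoint name and prompt are illustrative, not from this repo):

    from transformers import CLIPTokenizer

    tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
    text_inputs = tokenizer(
        ["a photo of a fox"],
        padding="max_length",
        max_length=77,          # stands in for self.tokenizer_max_length
        truncation=True,
        return_tensors="pt",
    )
    print(text_inputs.input_ids.shape)  # torch.Size([1, 77])
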
@@ -241,7 +241,7 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
             batch_size = len(prompt)
         else:
             batch_size = prompt_embeds.shape[0]
-
+
         if prompt_embeds is None:
             prompt_2 = prompt_2 or prompt
             prompt_2 = [prompt_2] if isinstance(prompt_2, str) else prompt_2
@@ -258,6 +258,28 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
                 max_sequence_length=max_sequence_length,
                 device=device,
             )
+            prompt_2_embed, pooled_prompt_2_embed = self._get_clip_prompt_embeds(
+                prompt=prompt_2,
+                device=device,
+                num_images_per_prompt=num_images_per_prompt,
+                clip_skip=clip_skip,
+                clip_model_index=1,
+            )
+            clip_prompt_embeds = torch.cat([prompt_embed, prompt_2_embed], dim=-1)
+
+            t5_prompt_embed = self._get_t5_prompt_embeds(
+                prompt=prompt_3,
+                num_images_per_prompt=num_images_per_prompt,
+                max_sequence_length=max_sequence_length,
+                device=device,
+            )
+
+            clip_prompt_embeds = torch.nn.functional.pad(
+                clip_prompt_embeds, (0, t5_prompt_embed.shape[-1] - clip_prompt_embeds.shape[-1])
+            )
+
+            prompt_embeds = torch.cat([clip_prompt_embeds, t5_prompt_embed], dim=-2)
+            pooled_prompt_embeds = torch.cat([pooled_prompt_embed, pooled_prompt_2_embed], dim=-1)
 
         if do_classifier_free_guidance and negative_prompt_embeds is None:
             negative_prompt = negative_prompt or ""
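
The 22 added lines assemble the final prompt embedding the way the SD3-family pipelines do: the hunk assumes a preceding `_get_clip_prompt_embeds(..., clip_model_index=0)` call (not shown) produced `prompt_embed`/`pooled_prompt_embed`, concatenates the two CLIP encoders' features, zero-pads them up to the T5 hidden size, and stacks the T5 tokens after the CLIP tokens. A shape-level sketch with stand-in tensors; the dimensions assume the common CLIP-L (768), CLIP-G (1280), and T5 (4096) widths, and every name here is illustrative:

    import torch

    batch, clip_seq, t5_seq = 1, 77, 256
    clip_l_embeds = torch.randn(batch, clip_seq, 768)   # stand-in for prompt_embed
    clip_g_embeds = torch.randn(batch, clip_seq, 1280)  # stand-in for prompt_2_embed
    t5_embeds = torch.randn(batch, t5_seq, 4096)        # stand-in for t5_prompt_embed

    # Feature-axis concat of the two CLIP encoders: (1, 77, 2048)
    clip_embeds = torch.cat([clip_l_embeds, clip_g_embeds], dim=-1)

    # Zero-pad the CLIP features on the right up to the T5 width: (1, 77, 4096)
    clip_embeds = torch.nn.functional.pad(
        clip_embeds, (0, t5_embeds.shape[-1] - clip_embeds.shape[-1])
    )

    # Sequence-axis concat so the T5 tokens follow the CLIP tokens: (1, 333, 4096)
    prompt_embeds = torch.cat([clip_embeds, t5_embeds], dim=-2)
    print(prompt_embeds.shape)  # torch.Size([1, 333, 4096])
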
@@ -269,12 +291,12 @@ class FluxWithCFGPipeline(DiffusionPipeline, FluxLoraLoaderMixin, FromSingleFile
             batch_size * [negative_prompt_2] if isinstance(negative_prompt_2, str) else negative_prompt_2
         )
 
-        if prompt is not None and type(prompt) is not type(negative_prompt):
+            if prompt is not None and type(prompt) is not type(negative_prompt):
                 raise TypeError(
                     f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
                     f" {type(prompt)}."
                 )
-        elif batch_size != len(negative_prompt):
+            elif batch_size != len(negative_prompt):
                 raise ValueError(
                     f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
                     f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
 