Charbel Malo committed on
Commit
49398d2
1 Parent(s): 9c97141

Update live_preview_helpers.py

Browse files
Files changed (1) hide show
  1. live_preview_helpers.py +2 -3
live_preview_helpers.py CHANGED
@@ -2,7 +2,6 @@ import torch
2
  import numpy as np
3
  from diffusers import FluxPipeline, AutoencoderTiny, FlowMatchEulerDiscreteScheduler
4
  from typing import Any, Dict, List, Optional, Union
5
- import spaces
6
 
7
  # Helper functions
8
  def calculate_shift(
@@ -41,7 +40,7 @@ def retrieve_timesteps(
41
  return timesteps, num_inference_steps
42
 
43
  # FLUX pipeline function
44
- @spaces.GPU()
45
  def flux_pipe_call_that_returns_an_iterable_of_images(
46
  self,
47
  prompt: Union[str, List[str]] = None,
@@ -164,4 +163,4 @@ def flux_pipe_call_that_returns_an_iterable_of_images(
164
  image = good_vae.decode(latents, return_dict=False)[0]
165
  self.maybe_free_model_hooks()
166
  torch.cuda.empty_cache()
167
- yield self.image_processor.postprocess(image, output_type=output_type)[0]
 
2
  import numpy as np
3
  from diffusers import FluxPipeline, AutoencoderTiny, FlowMatchEulerDiscreteScheduler
4
  from typing import Any, Dict, List, Optional, Union
 
5
 
6
  # Helper functions
7
  def calculate_shift(
 
40
  return timesteps, num_inference_steps
41
 
42
  # FLUX pipeline function
43
+ @torch.inference_mode()
44
  def flux_pipe_call_that_returns_an_iterable_of_images(
45
  self,
46
  prompt: Union[str, List[str]] = None,
 
163
  image = good_vae.decode(latents, return_dict=False)[0]
164
  self.maybe_free_model_hooks()
165
  torch.cuda.empty_cache()
166
+ yield self.image_processor.postprocess(image, output_type=output_type)[0]