Update inf.py
inf.py CHANGED

@@ -108,14 +108,12 @@ class InferencePipeline:
         guidance_scale: float,
         num_images_per_prompt: int = 1
     ) -> PIL.Image.Image:
-
-        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
         if not torch.cuda.is_available():
             raise gr.Error('CUDA is not available.')
 
         self.load_pipe(content_lora_model_id, style_lora_model_id, content_alpha, style_alpha)
 
-        generator = torch.Generator(
+        generator = torch.Generator().manual_seed(seed)
         out = self.pipe(
             prompt,
             num_inference_steps=n_steps,
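
For reference, a minimal sketch of the pattern the added line uses: a default (CPU-side) torch.Generator seeded with manual_seed and passed to a diffusers pipeline call. This is illustrative only, not this Space's actual inf.py; the model id, prompt, and call parameters below are placeholders.

# Sketch only: seeded generation with a CPU-side torch.Generator.
# The model id, prompt, and parameter values are placeholders, not the Space's real ones.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "stabilityai/stable-diffusion-xl-base-1.0",  # placeholder model id
    torch_dtype=torch.float16,
).to("cuda")

seed = 0
# A default Generator lives on the CPU and is device-agnostic, so it can be
# created before (or independently of) the pipeline's device placement and
# still make sampling reproducible for a given seed.
generator = torch.Generator().manual_seed(seed)

image = pipe(
    "a photo of an astronaut riding a horse",  # placeholder prompt
    num_inference_steps=25,
    guidance_scale=7.5,
    generator=generator,
).images[0]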