Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -86,7 +86,8 @@ class FluxEditor:
     @spaces.GPU(duration=120)
     @torch.inference_mode()
     def edit(self, init_image, source_prompt, target_prompt, num_steps, inject_step, guidance, seed):
-
+
+        self.device = "cuda" if torch.cuda.is_available() else "cpu"
         torch.cuda.empty_cache()
         seed = None
         # if seed == -1:
@@ -137,7 +138,7 @@ class FluxEditor:
         os.mkdir(self.feature_path)
 
 
-        print("!!!!!!!!!!!!device!!!!!!!!!!!!!!",device)
+        print("!!!!!!!!!!!!device!!!!!!!!!!!!!!",self.device)
         self.t5 = load_t5(self.device, max_length=256 if self.name == "flux-schnell" else 512)
         self.clip = load_clip(self.device)
         self.model = load_flow_model(self.name, device="cpu" if self.offload else self.device)
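For context, a minimal sketch of the pattern this commit applies, assuming ZeroGPU semantics in which a CUDA device is only attached while a @spaces.GPU-decorated function is executing; the class and argument names that do not appear in the diff above are illustrative, not part of the Space's code.

# Sketch only: assumes the Hugging Face `spaces` package (ZeroGPU) and torch.
# Outside a @spaces.GPU call the Space runs on CPU, so torch.cuda.is_available()
# can return False at startup; inside the decorated call a GPU is attached.
import spaces
import torch


class Editor:  # illustrative stand-in for FluxEditor
    def __init__(self):
        # Probing here may report "cpu" even though edit() will get a GPU later.
        self.device = "cuda" if torch.cuda.is_available() else "cpu"

    @spaces.GPU(duration=120)
    @torch.inference_mode()
    def edit(self, *args, **kwargs):
        # Re-probe inside the GPU-backed call, as the commit does, so subsequent
        # model loading targets CUDA when it is actually available.
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        torch.cuda.empty_cache()
        print("device:", self.device)

Storing the result on self.device (and switching the debug print from the bare device name to self.device) keeps every later load_t5 / load_clip / load_flow_model call consistent with the device that was detected inside the GPU-backed function.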