wjs0725 committed on
Commit
fd38fd2
1 Parent(s): 1323822

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -1
app.py CHANGED
@@ -89,7 +89,6 @@ class FluxEditor:
89
  self.ae.encoder.to(self.device)
90
 
91
  @torch.inference_mode()
92
- @spaces.GPU(duration=150)
93
  def edit(self, init_image, source_prompt, target_prompt, num_steps, inject_step, guidance, seed):
94
  torch.cuda.empty_cache()
95
  seed = None
@@ -137,6 +136,32 @@ class FluxEditor:
137
 
138
  if not os.path.exists(self.feature_path):
139
  os.mkdir(self.feature_path)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
140
 
141
  with torch.no_grad():
142
  inp = prepare(self.t5, self.clip, init_image, prompt=opts.source_prompt)
 
89
  self.ae.encoder.to(self.device)
90
 
91
  @torch.inference_mode()
 
92
  def edit(self, init_image, source_prompt, target_prompt, num_steps, inject_step, guidance, seed):
93
  torch.cuda.empty_cache()
94
  seed = None
 
136
 
137
  if not os.path.exists(self.feature_path):
138
  os.mkdir(self.feature_path)
139
+
140
+ print("!!!!!!!!self.t5!!!!!!",next(self.t5.parameters()).device)
141
+ print("!!!!!!!!self.clip!!!!!!",next(self.clip.parameters()).device)
142
+ print("!!!!!!!!self.model!!!!!!",next(self.model.parameters()).device)
143
+
144
+ device = torch.cuda.current_device()
145
+ total_memory = torch.cuda.get_device_properties(device).total_memory
146
+ allocated_memory = torch.cuda.memory_allocated(device)
147
+ reserved_memory = torch.cuda.memory_reserved(device)
148
+
149
+ print(f"Total memory: {total_memory / 1024**2:.2f} MB")
150
+ print(f"Allocated memory: {allocated_memory / 1024**2:.2f} MB")
151
+ print(f"Reserved memory: {reserved_memory / 1024**2:.2f} MB")
152
+ self.t5 = self.t5.cuda()
153
+ self.clip = self.clip.cuda()
154
+ self.model = self.model.cuda()
155
+
156
+ device = torch.cuda.current_device()
157
+ total_memory = torch.cuda.get_device_properties(device).total_memory
158
+ allocated_memory = torch.cuda.memory_allocated(device)
159
+ reserved_memory = torch.cuda.memory_reserved(device)
160
+
161
+ print(f"Total memory: {total_memory / 1024**2:.2f} MB")
162
+ print(f"Allocated memory: {allocated_memory / 1024**2:.2f} MB")
163
+ print(f"Reserved memory: {reserved_memory / 1024**2:.2f} MB")
164
+
165
 
166
  with torch.no_grad():
167
  inp = prepare(self.t5, self.clip, init_image, prompt=opts.source_prompt)