wjs0725 committed
Commit
748300c
1 Parent(s): 92ebacc

Update app.py

Files changed (1)
  1. app.py +10 -20
app.py CHANGED
@@ -66,14 +66,7 @@ class FluxEditor:
             raise ValueError(f"Got unknown model name: {name}, chose from {available}")
 
         # init all components
-        self.t5 = load_t5(self.device, max_length=256 if self.name == "flux-schnell" else 512)
-        self.clip = load_clip(self.device)
-        self.model = load_flow_model(self.name, device="cpu" if self.offload else self.device)
-        self.ae = load_ae(self.name, device="cpu" if self.offload else self.device)
-        self.t5.eval()
-        self.clip.eval()
-        self.ae.eval()
-        self.model.eval()
+
 
         if self.offload:
             self.model.cpu()
@@ -107,6 +100,8 @@ class FluxEditor:
         init_image = init_image[:new_h, :new_w, :]
 
         width, height = init_image.shape[0], init_image.shape[1]
+
+        self.ae = load_ae(self.name, device="cpu" if self.offload else self.device)
         init_image = self.encode(init_image, self.device, self.ae)
 
         print(init_image.shape)
@@ -140,6 +135,13 @@ class FluxEditor:
 
         if not os.path.exists(self.feature_path):
             os.mkdir(self.feature_path)
+
+
+        print("!!!!!!!!!!!!device!!!!!!!!!!!!!!",device)
+        self.t5 = load_t5(self.device, max_length=256 if self.name == "flux-schnell" else 512)
+        self.clip = load_clip(self.device)
+        self.model = load_flow_model(self.name, device="cpu" if self.offload else self.device)
+
 
         print("!!!!!!!!self.t5!!!!!!",next(self.t5.parameters()).device)
         print("!!!!!!!!self.clip!!!!!!",next(self.clip.parameters()).device)
@@ -150,18 +152,6 @@ class FluxEditor:
         allocated_memory = torch.cuda.memory_allocated(device)
         reserved_memory = torch.cuda.memory_reserved(device)
 
-        print(f"Total memory: {total_memory / 1024**2:.2f} MB")
-        print(f"Allocated memory: {allocated_memory / 1024**2:.2f} MB")
-        print(f"Reserved memory: {reserved_memory / 1024**2:.2f} MB")
-        self.t5 = self.t5.cuda()
-        self.clip = self.clip.cuda()
-        self.model = self.model.cuda()
-
-        device = torch.cuda.current_device()
-        total_memory = torch.cuda.get_device_properties(device).total_memory
-        allocated_memory = torch.cuda.memory_allocated(device)
-        reserved_memory = torch.cuda.memory_reserved(device)
-
         print(f"Total memory: {total_memory / 1024**2:.2f} MB")
         print(f"Allocated memory: {allocated_memory / 1024**2:.2f} MB")
         print(f"Reserved memory: {reserved_memory / 1024**2:.2f} MB")