nikigoli committed on
Commit
338c332
1 Parent(s): 8a701a5

Removed toplevel checkpoint setting for transformer

Browse files
Files changed (1) hide show
  1. app.py +0 -2
app.py CHANGED
@@ -156,8 +156,6 @@ def build_model_and_transforms(args):
156
 
157
  checkpoint = torch.load(args.pretrain_model_path, map_location="cpu")["model"]
158
  model.load_state_dict(checkpoint, strict=False)
159
- model.transformer.use_checkpoint = False
160
- model.transformer.use_transformer_ckpt = False
161
 
162
  model.eval()
163
 
 
156
 
157
  checkpoint = torch.load(args.pretrain_model_path, map_location="cpu")["model"]
158
  model.load_state_dict(checkpoint, strict=False)
 
 
159
 
160
  model.eval()
161