Removed top-level checkpoint setting for transformer
app.py CHANGED
@@ -156,8 +156,6 @@ def build_model_and_transforms(args):
 
     checkpoint = torch.load(args.pretrain_model_path, map_location="cpu")["model"]
     model.load_state_dict(checkpoint, strict=False)
-    model.transformer.use_checkpoint = False
-    model.transformer.use_transformer_ckpt = False
 
     model.eval()
 
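For context, flags like `use_checkpoint` and `use_transformer_ckpt` typically gate gradient (activation) checkpointing inside transformer blocks, which only matters during training; since this app puts the model into `model.eval()` for inference, forcing them to `False` after loading has no practical effect. The sketch below is an assumption about how such a flag is usually wired with `torch.utils.checkpoint`, not the actual model code in this repo; `Block` and the layer sizes are illustrative.

```python
# Minimal sketch (assumed, not this repo's model) of a use_checkpoint-style flag
# gating gradient checkpointing in a transformer sub-module.
import torch
import torch.nn as nn
from torch.utils.checkpoint import checkpoint


class Block(nn.Module):
    def __init__(self, dim=256, use_checkpoint=False):
        super().__init__()
        # Trade extra compute for lower memory during training when enabled.
        self.use_checkpoint = use_checkpoint
        self.mlp = nn.Sequential(nn.Linear(dim, dim), nn.ReLU(), nn.Linear(dim, dim))

    def forward(self, x):
        if self.use_checkpoint and self.training:
            # Recompute activations in the backward pass instead of storing them.
            return checkpoint(self.mlp, x, use_reentrant=False)
        return self.mlp(x)


# In eval mode the checkpointing branch is skipped entirely, so the flag is moot at inference.
block = Block(use_checkpoint=True).eval()
print(block(torch.randn(2, 256)).shape)
```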