Samanenayati committed on
Commit a62101d
1 Parent(s): 44833bc

Upload model

Files changed (2)
  1. adapter_config.json +1 -1
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -9,7 +9,7 @@
   "num_attention_heads": 16,
   "num_layers": 24,
   "num_transformer_submodules": 1,
-  "num_virtual_tokens": 2,
+  "num_virtual_tokens": 3,
   "peft_type": "P_TUNING",
   "revision": null,
   "task_type": "SEQ_CLS",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ccd5ea7fc14ea45a69d6a8ca11755f86c9f911547c033ae5616df5c925355b4b
-size 4215320
+oid sha256:b1107a2fae387095d79c1aee71bb7758a5463be0261025b3ebbc748ee5b9feaa
+size 4219416
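
The safetensors change is only the Git LFS pointer being updated: oid is the SHA-256 of the file contents and size its length in bytes. The 4,096-byte growth (4219416 - 4215320) is consistent with one additional 1,024-dimensional fp32 virtual-token embedding, matching the config change above. A hedged sketch for checking a locally downloaded copy against the new pointer follows; the local path is an assumption, not part of this commit.

# Hedged sketch: verify a downloaded adapter_model.safetensors against the new
# LFS pointer. The local path below is an assumption.
import hashlib
import os

path = "adapter_model.safetensors"  # assumed local download location
expected_oid = "b1107a2fae387095d79c1aee71bb7758a5463be0261025b3ebbc748ee5b9feaa"
expected_size = 4219416

with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert digest == expected_oid, "sha256 does not match the LFS pointer"
print("adapter_model.safetensors matches the pointer recorded in this commit")
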