collij22 committed on
Commit 1f7d971 · verified · 1 Parent(s): 602b560

Upload BioGptForCausalLM

Files changed (3):
  1. config.json +1 -1
  2. generation_config.json +7 -0
  3. model.safetensors +2 -2
config.json CHANGED
@@ -2,7 +2,7 @@
   "_name_or_path": "microsoft/BioGPT-Large-PubMedQA",
   "activation_dropout": 0.0,
   "architectures": [
-    "BioGptForSequenceClassification"
+    "BioGptForCausalLM"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 2,
+  "pad_token_id": 1,
+  "transformers_version": "4.38.2"
+}
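
The new generation_config.json records the default special-token ids (bos 0, eos 2, pad 1), derived from the model config by transformers 4.38.2; generate() picks these up automatically when no overrides are passed. A sketch continuing from the loading example above, with an illustrative PubMedQA-style prompt (the prompt format is an assumption, not defined by this commit):

inputs = tokenizer("question: Is aspirin effective? context: ...", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))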
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:d63d2d3cfa7a22b9330d3da7b12d4d3826fa1b139478be8f857889f8a53a11d5
- size 3142498272
+ oid sha256:6aacdca1f8694b78aea33506f9bf804911b8aa4945ec7d3b757c46c26e55edd2
+ size 3142491784