dandanw committed
Commit 153899f · 1 Parent(s): 8fad41a

remove a file

Files changed (1)
  1. config.json.bk +0 -33
config.json.bk DELETED
@@ -1,33 +0,0 @@
- {
-   "_name_or_path": "./bloomz-3b-sv",
-   "apply_residual_connection_post_layernorm": false,
-   "architectures": [
-     "BloomForCausalLM"
-   ],
-   "attention_dropout": 0.0,
-   "attention_softmax_in_fp32": true,
-   "bias_dropout_fusion": true,
-   "bos_token_id": 1,
-   "eos_token_id": 2,
-   "hidden_dropout": 0.0,
-   "hidden_size": 2560,
-   "initializer_range": 0.02,
-   "layer_norm_epsilon": 1e-05,
-   "masked_softmax_fusion": true,
-   "model_type": "bloom",
-   "n_head": 32,
-   "n_inner": null,
-   "n_layer": 30,
-   "offset_alibi": 100,
-   "pad_token_id": 3,
-   "pretraining_tp": 4,
-   "seq_length": 2048,
-   "skip_bias_add": true,
-   "skip_bias_add_qkv": false,
-   "slow_but_exact": false,
-   "torch_dtype": "float32",
-   "transformers_version": "4.27.2",
-   "unk_token_id": 0,
-   "use_cache": true,
-   "vocab_size": 258748
- }
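
For context, the deleted backup is a standard BLOOM model configuration. As an illustrative sketch only (not part of this commit), the same hyperparameters could be reconstructed with transformers' BloomConfig; the values below are copied verbatim from the removed file, and the remaining fields fall back to their defaults:

    from transformers import BloomConfig

    # Sketch: rebuild the configuration that config.json.bk contained.
    # All values are taken from the deleted file shown in the diff above.
    config = BloomConfig(
        vocab_size=258748,
        hidden_size=2560,
        n_layer=30,
        n_head=32,
        layer_norm_epsilon=1e-05,
        initializer_range=0.02,
        hidden_dropout=0.0,
        attention_dropout=0.0,
        pretraining_tp=4,
        slow_but_exact=False,
        use_cache=True,
        bos_token_id=1,
        eos_token_id=2,
        pad_token_id=3,
    )
    print(config.model_type)  # -> "bloom"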