Tags: Text Generation, Transformers, PyTorch, TensorBoard, Safetensors, bloom, Eval Results, text-generation-inference, Inference Endpoints
Commit bea8142
bigscience-bot committed
1 Parent(s): e0e6b58
Files changed (2):
  1. config.json +1 -9
  2. pytorch_model.bin.index.json +1 -1
config.json CHANGED
@@ -2,26 +2,18 @@
  "apply_residual_connection_post_layernorm": false,
  "attention_dropout": 0.0,
  "attention_softmax_in_fp32": true,
- "bias_dropout_fusion": true,
  "bos_token_id": 1,
- "dtype": "bfloat16",
  "eos_token_id": 2,
- "pad_token_id": 3,
- "unk_token_id": 0,
  "hidden_dropout": 0.0,
  "initializer_range": 0.02,
  "layer_norm_epsilon": 1e-05,
  "masked_softmax_fusion": true,
  "model_type": "bloom",
  "n_embed": 14336,
- "n_inner": null,
  "n_layer": 70,
  "num_attention_heads": 112,
- "offset_alibi": 100,
  "pretraining_tp": 4,
- "seq_length": 2048,
- "skip_bias_add": true,
- "skip_bias_add_qkv": false,
+ "slow_but_exact": false,
  "transformers_version": "4.20.0.dev0",
  "use_cache": true,
  "vocab_size": 250880
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,6 @@
  {
  "metadata": {
- "total_size": 256701667093757598507563477393900852847546398236101558255428058075828224
+ "total_size": 352494542848
  },
  "weight_map": {
  "h.0.input_layernorm.bias": "pytorch_model_00002-of-00072.bin",