VarunGumma committed
Commit ba7db23 · verified · 1 parent: f62d94a

Upload config.json with huggingface_hub

Files changed (1):
  config.json (+14, -8)
config.json CHANGED
@@ -1,24 +1,30 @@
 {
+  "name_or_path": "VarunGumma/rotary-indictrans2-en-indic-1B",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "auto_map": {
+    "AutoConfig": "configuration_rotary_indictrans.RotaryIndicTransConfig",
+    "AutoModelForSeq2SeqLM": "modeling_rotary_indictrans.RotaryIndicTransForConditionalGeneration"
+  },
+  "tokenizer_class": "IndicTransTokenizer",
   "architectures": [
     "RotaryIndicTransForConditionalGeneration"
   ],
   "attention_dropout": 0.0,
   "attn_implementation": "eager",
   "bos_token_id": 0,
-  "decoder_attention_heads": 8,
-  "decoder_embed_dim": 512,
-  "decoder_ffn_dim": 2048,
+  "decoder_attention_heads": 16,
+  "decoder_embed_dim": 1024,
+  "decoder_ffn_dim": 8192,
   "decoder_layerdrop": 0,
   "decoder_layers": 18,
   "decoder_normalize_before": true,
   "decoder_start_token_id": 2,
   "decoder_vocab_size": 122672,
   "dropout": 0.2,
-  "encoder_attention_heads": 8,
-  "encoder_embed_dim": 512,
-  "encoder_ffn_dim": 2048,
+  "encoder_attention_heads": 16,
+  "encoder_embed_dim": 1024,
+  "encoder_ffn_dim": 8192,
   "encoder_layerdrop": 0,
   "encoder_layers": 18,
   "encoder_normalize_before": true,
@@ -26,7 +32,7 @@
   "eos_token_id": 2,
   "init_std": 0.02,
   "is_encoder_decoder": true,
-  "layernorm_embedding": true,
+  "layernorm_embedding": false,
   "model_type": "RotaryIndicTrans",
   "num_hidden_layers": 18,
   "pad_token_id": 1,
@@ -34,7 +40,7 @@
     "theta": 10000
   },
   "scale_embedding": true,
-  "share_decoder_input_output_embed": true,
+  "share_decoder_input_output_embed": false,
   "torch_dtype": "float32",
   "transformers_version": "4.44.0",
   "use_cache": true