ggcristian committed
Commit 27e2df5
Parent: 8ae9bc2

Upload config.json

Files changed (1):
config.json (+88, -0)
config.json ADDED
@@ -0,0 +1,88 @@
+ {
+   "architectures": [
+     "TinyEmoForConditionalGeneration"
+   ],
+   "cache_dir": null,
+   "connector_type": "mlp2x_gelu",
+   "hidden_size": 1280,
+   "ignore_index": -100,
+   "image_aspect_ratio": "square",
+   "image_token_index": -200,
+   "llm_model_name_or_path": "apple/OpenELM-270M-Instruct",
+   "model_type": "tinyemo",
+   "auto_map": {
+     "AutoConfig": "configuration_openelm.OpenELMConfig",
+     "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
+   },
+   "num_queries": 128,
+   "num_resampler_layers": 3,
+   "pad_token": "<|endoftext|>",
+   "pad_token_id": 1,
+   "resampler_hidden_size": 768,
+   "text_config": {
+     "_name_or_path": "apple/OpenELM-270M-Instruct",
+     "architectures": [
+       "OpenELMForCausalLM"
+     ],
+     "auto_map": {
+       "AutoConfig": "configuration_openelm.OpenELMConfig",
+       "AutoModelForCausalLM": "modeling_openelm.OpenELMForCausalLM"
+     },
+     "bos_token_id": 1,
+     "eos_token_id": 2,
+     "ffn_dim_divisor": 256,
+     "ffn_multipliers": [
+       0.5, 0.73, 0.97, 1.2, 1.43, 1.67, 1.9, 2.13, 2.37, 2.6, 2.83, 3.07, 3.3, 3.53, 3.77, 4.0
+     ],
+     "ffn_with_glu": true,
+     "hidden_act": "swish",
+     "head_dim": 64,
+     "initializer_range": 0.02,
+     "max_context_length": 2048,
+     "model_dim": 1280,
+     "model_type": "openelm",
+     "normalization_layer_name": "rms_norm",
+     "normalize_qk_projections": true,
+     "num_gqa_groups": 4,
+     "num_kv_heads": [3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5],
+     "num_query_heads": [12, 12, 12, 12, 12, 16, 16, 16, 16, 16, 16, 16, 20, 20, 20, 20],
+     "num_transformer_layers": 16,
+     "qkv_multipliers": [0.5, 1.0],
+     "rope_freq_constant": 10000,
+     "rope_max_length": 4096,
+     "share_input_output_layers": true,
+     "torch_dtype": "bfloat16",
+     "vocab_size": 32000
+   },
+   "tokenizer_model_max_length": 4096,
+   "tokenizer_padding_side": "right",
+   "tokenizer_use_fast": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.39.3",
+   "tune_type_connector": "full",
+   "tune_type_llm": "full",
+   "tune_type_vision_tower": "frozen",
+   "tune_vision_tower_from_layer": 0,
+   "use_cache": true,
+   "vision_config": {
+     "_name_or_path": "openai/clip-vit-large-patch14",
+     "architectures": [
+       "CLIPModel"
+     ],
+     "hidden_act": "quick_gelu",
+     "hidden_size": 1024,
+     "image_size": 224,
+     "intermediate_size": 4096,
+     "layer_norm_eps": 1e-05,
+     "model_type": "clip_vision_model",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 24,
+     "patch_size": 14,
+     "projection_dim": 768
+   },
+   "vision_feature_layer": -2,
+   "vision_feature_select_strategy": "patch",
+   "vision_hidden_size": 1024,
+   "vision_model_name_or_path": "openai/clip-vit-large-patch14",
+   "vocab_size": 32000
+ }
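
Because the "auto_map" entries above route AutoConfig and AutoModelForCausalLM to custom files shipped with the repo (configuration_openelm.py, modeling_openelm.py), loading this config through transformers requires trusting remote code. A minimal sketch, assuming a hypothetical repo id for this checkpoint:

```python
from transformers import AutoConfig

# Hypothetical repo id for illustration; substitute the repo this commit belongs to.
repo_id = "ggcristian/TinyEmo-OpenELM-270M"

# trust_remote_code=True is needed because "auto_map" points at custom
# configuration/modeling code rather than a built-in transformers class.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)

# Top-level keys from the config.json above land as attributes on the config.
print(config.llm_model_name_or_path)     # "apple/OpenELM-270M-Instruct"
print(config.vision_model_name_or_path)  # "openai/clip-vit-large-patch14"
```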
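The "connector_type": "mlp2x_gelu" field names the projector between the CLIP vision tower and the OpenELM backbone. This file does not define it; the sketch below assumes the common LLaVA/TinyLLaVA convention for that name (two linear layers with a GELU in between, mapping vision_hidden_size = 1024 to hidden_size = 1280) and the actual TinyEmo implementation may differ:

```python
import torch
import torch.nn as nn

# Assumed "mlp2x_gelu" connector: project CLIP patch features into the LLM width.
connector = nn.Sequential(
    nn.Linear(1024, 1280),  # vision_hidden_size -> hidden_size
    nn.GELU(),
    nn.Linear(1280, 1280),  # hidden_size -> hidden_size
)

# With image_size 224 and patch_size 14, CLIP ViT-L/14 yields (224/14)^2 = 256
# patch tokens; "vision_feature_select_strategy": "patch" keeps only these
# (dropping the CLS token), taken from layer -2 per "vision_feature_layer".
patches = torch.randn(1, 256, 1024)
projected = connector(patches)  # shape: (1, 256, 1280)
```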