Delta-Vector committed
Commit 72e6fc3 · verified · 1 parent: 16bbead

Upload Gemma-Nemo-C2-Chatml.yml

Files changed (1):
  Gemma-Nemo-C2-Chatml.yml  +100 −0
Gemma-Nemo-C2-Chatml.yml ADDED
@@ -0,0 +1,100 @@
+ base_model: SillyTilly/google-gemma-2-9b
+ model_type: AutoModelForCausalLM
+ tokenizer_type: AutoTokenizer
+
+ plugins:
+   - axolotl.integrations.liger.LigerPlugin
+ liger_rope: true
+ liger_rms_norm: true
+ liger_swiglu: true
+ #liger_cross_entropy: true
+ liger_fused_linear_cross_entropy: true
+
+ load_in_8bit: false
+ load_in_4bit: false
+ strict: false
+
+ datasets:
+   - path: anthracite-core/c2_logs_16k_llama_v1.1
+     type: sharegpt
+     conversation: chatml
+   - path: anthracite-org/kalo-opus-instruct-22k-no-refusal
+     type: sharegpt
+     conversation: chatml
+   - path: Epiculous/SynthRP-Gens-v1.1-Filtered-n-Cleaned
+     type: sharegpt
+     conversation: chatml
+   - path: lodrick-the-lafted/kalo-opus-instruct-3k-filtered
+     type: sharegpt
+     conversation: chatml
+   - path: anthracite-org/nopm_claude_writing_fixed
+     type: sharegpt
+     conversation: chatml
+   - path: Epiculous/Synthstruct-Gens-v1.1-Filtered-n-Cleaned
+     type: sharegpt
+     conversation: chatml
+   - path: anthracite-org/kalo_opus_misc_240827
+     type: sharegpt
+     conversation: chatml
+   - path: anthracite-org/kalo_misc_part2
+     type: sharegpt
+     conversation: chatml
+ chat_template: chatml
+ shuffle_merged_datasets: false
+ default_system_message: "You are a helpful assistant that responds to the user."
+ dataset_prepared_path: /workspace/data/9b-fft-data
+ val_set_size: 0.0
+ output_dir: /workspace/data/9b-fft-out
+
+ sequence_len: 8192
+ sample_packing: true
+ eval_sample_packing: false
+ pad_to_sequence_len: true
+
+ adapter:
+ lora_model_dir:
+ lora_r:
+ lora_alpha:
+ lora_dropout:
+ lora_target_linear:
+ lora_fan_in_fan_out:
+
+ wandb_project: 9b-Nemo-config-fft
+ wandb_entity:
+ wandb_watch:
+ wandb_name: attempt-01
+ wandb_log_model:
+
+ gradient_accumulation_steps: 2
+ micro_batch_size: 2
+ num_epochs: 4
+ optimizer: adamw_bnb_8bit
+ lr_scheduler: cosine
+ learning_rate: 0.00001
+
+ train_on_inputs: false
+ group_by_length: false
+ bf16: auto
+ fp16:
+ tf32: false
+
+ gradient_checkpointing: true
+ early_stopping_patience:
+ auto_resume_from_checkpoints: true
+ local_rank:
+ logging_steps: 1
+ xformers_attention:
+ flash_attention: true
+
+ warmup_steps: 10
+ evals_per_epoch:
+ eval_table_size:
+ eval_max_new_tokens:
+ saves_per_epoch: 1
+ debug:
+ deepspeed: deepspeed_configs/zero3_bf16.json
+ weight_decay: 0.001
+ fsdp:
+ fsdp_config:
+ special_tokens:
+   pad_token: <pad>
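
Each dataset above is loaded as ShareGPT-style turn lists and rendered with the ChatML template, with default_system_message supplied when a conversation has no system turn. A minimal sketch of what that rendering produces; the helper name and sample turns are illustrative assumptions, not Axolotl's internal code:

    # Sketch of ShareGPT -> ChatML rendering, matching the template chosen above.
    # to_chatml and the sample conversation are hypothetical, for illustration only.
    def to_chatml(turns, system="You are a helpful assistant that responds to the user."):
        """Render [{"from": ..., "value": ...}, ...] turns as ChatML text."""
        role_map = {"human": "user", "gpt": "assistant", "system": "system"}
        parts = [f"<|im_start|>system\n{system}<|im_end|>"]
        for turn in turns:
            role = role_map.get(turn["from"], turn["from"])
            parts.append(f"<|im_start|>{role}\n{turn['value']}<|im_end|>")
        return "\n".join(parts) + "\n"

    sample = [
        {"from": "human", "value": "Summarize the plot in one sentence."},
        {"from": "gpt", "value": "A stranded botanist farms his way home from Mars."},
    ]
    print(to_chatml(sample))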
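
For sizing a run, the effective batch size follows from the trainer settings above; the GPU count below is an assumption, since the zero3_bf16 DeepSpeed config implies multi-GPU training but the config does not state the node size:

    # Back-of-the-envelope effective batch size from the settings above.
    # world_size is an assumption; adjust to the actual number of GPUs.
    micro_batch_size = 2
    gradient_accumulation_steps = 2
    world_size = 8  # hypothetical: e.g. one 8-GPU node under ZeRO-3

    effective_batch = micro_batch_size * gradient_accumulation_steps * world_size
    print(effective_batch)  # 32 packed sequences of up to 8192 tokens per optimizer step

With the file saved locally, a run of this config would typically be launched with something like accelerate launch -m axolotl.cli.train Gemma-Nemo-C2-Chatml.yml.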