Training in progress, step 500
- adapter_config.json +30 -0
- adapter_model.safetensors +3 -0
- preprocessor_config.json +14 -0
- runs/Feb25_00-35-12_97bd66950bc1/events.out.tfevents.1708821313.97bd66950bc1.4779.0 +3 -0
- runs/Feb25_00-37-51_97bd66950bc1/events.out.tfevents.1708821472.97bd66950bc1.5834.0 +3 -0
- runs/Feb25_01-09-59_97bd66950bc1/events.out.tfevents.1708823399.97bd66950bc1.13989.0 +3 -0
- training_args.bin +3 -0
adapter_config.json
ADDED
@@ -0,0 +1,30 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": {
+    "base_model_class": "WhisperForConditionalGeneration",
+    "parent_library": "transformers.models.whisper.modeling_whisper"
+  },
+  "base_model_name_or_path": "openai/whisper-small",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 64,
+  "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 32,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj"
+  ],
+  "task_type": null,
+  "use_rslora": false
+}
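For reference, this adapter configuration describes a standard PEFT LoRA setup: rank 32, scaling alpha 64, dropout 0.05, applied to the q_proj and v_proj attention projections of openai/whisper-small. A minimal sketch of how an equivalent config could be built with the peft library; the variable names are illustrative and this code is not part of the commit:

# Sketch: recreating the LoRA setup described by adapter_config.json above.
# Assumes the peft and transformers libraries; variable names are illustrative.
from transformers import WhisperForConditionalGeneration
from peft import LoraConfig, get_peft_model

base_model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-small")

lora_config = LoraConfig(
    r=32,                                 # "r": 32
    lora_alpha=64,                        # "lora_alpha": 64
    lora_dropout=0.05,                    # "lora_dropout": 0.05
    bias="none",                          # "bias": "none"
    target_modules=["q_proj", "v_proj"],  # attention projections to adapt
)

lora_model = get_peft_model(base_model, lora_config)
lora_model.print_trainable_parameters()  # only the small LoRA matrices are trainable
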
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:22164b2788575a6806324fa70ee4156cf2b033eb2289e133ac77812712129ae8
+size 14176064
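adapter_model.safetensors holds only the trained LoRA weights (about 14 MB) as a Git LFS object; the frozen whisper-small base weights are not part of the checkpoint. A hedged sketch of loading the adapter for inference, assuming the checkpoint files have been downloaded to a local directory named checkpoint-500 (the path is an assumption):

from transformers import WhisperForConditionalGeneration
from peft import PeftModel

# Load the frozen base model, then attach the LoRA adapter on top of it.
base_model = WhisperForConditionalGeneration.from_pretrained("openai/whisper-small")
# Directory containing adapter_config.json and adapter_model.safetensors.
model = PeftModel.from_pretrained(base_model, "checkpoint-500")
model.eval()
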
preprocessor_config.json
ADDED
@@ -0,0 +1,14 @@
+{
+  "chunk_length": 30,
+  "feature_extractor_type": "WhisperFeatureExtractor",
+  "feature_size": 80,
+  "hop_length": 160,
+  "n_fft": 400,
+  "n_samples": 480000,
+  "nb_max_frames": 3000,
+  "padding_side": "right",
+  "padding_value": 0.0,
+  "processor_class": "WhisperProcessor",
+  "return_attention_mask": false,
+  "sampling_rate": 16000
+}
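These preprocessor settings match the stock Whisper feature extractor: 16 kHz audio, 30-second windows (480,000 samples), and 80 log-Mel bins computed with a 400-point FFT at a hop of 160 samples, giving 3,000 frames. A minimal usage sketch via WhisperProcessor; the silent audio array is a placeholder assumption:

import numpy as np
from transformers import WhisperProcessor

processor = WhisperProcessor.from_pretrained("openai/whisper-small")

# Placeholder input: 5 seconds of silence at the expected 16 kHz rate.
audio = np.zeros(16000 * 5, dtype=np.float32)
inputs = processor(audio, sampling_rate=16000, return_tensors="pt")
print(inputs.input_features.shape)  # (1, 80, 3000): feature_size x nb_max_frames
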
runs/Feb25_00-35-12_97bd66950bc1/events.out.tfevents.1708821313.97bd66950bc1.4779.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1587021f6b9ad2baa59071a9f9e1627821d1198dbb7b7c28c72062a89bc6b40b
+size 4184
runs/Feb25_00-37-51_97bd66950bc1/events.out.tfevents.1708821472.97bd66950bc1.5834.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03637124e173f6bc554e064aa32e02e81fd5217463e490c5a5e0aac40cff6482
+size 9432
runs/Feb25_01-09-59_97bd66950bc1/events.out.tfevents.1708823399.97bd66950bc1.13989.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e50847b893c0d9c66250ffbd9c579bab89a4b485103b4faa31539e0bbc1e70ac
+size 10336
training_args.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7b2eec5a37e10f51eb7da0f6d421ab7c2d15f11e30274ad7efbdfbb283390ec2
+size 5048
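training_args.bin is the pickled TrainingArguments object that the transformers Trainer saves next to each checkpoint. A hedged sketch of inspecting it; depending on the torch version, weights_only=False may be needed because the file is a pickled Python object rather than plain tensors:

import torch

# Unpickle the saved TrainingArguments (transformers must be installed,
# since the pickle references its classes).
training_args = torch.load("training_args.bin", weights_only=False)
print(training_args.learning_rate, training_args.per_device_train_batch_size)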