anhdt-dsai-02 committed on
Commit
295938b
1 Parent(s): 7206ac5

Model save

Browse files
Files changed (2) hide show
  1. README.md +5 -5
  2. generation_config.json +3 -0
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- base_model: vinai/bartpho-syllable
3
  tags:
4
  - generated_from_trainer
5
  model-index:
@@ -12,7 +12,7 @@ should probably proofread and complete it, then remove this comment. -->
12
 
13
  # sentiment_25_12
14
 
15
- This model is a fine-tuned version of [vinai/bartpho-syllable](https://huggingface.co/vinai/bartpho-syllable) on an unknown dataset.
16
 
17
  ## Model description
18
 
@@ -32,8 +32,8 @@ More information needed
32
 
33
  The following hyperparameters were used during training:
34
  - learning_rate: 1e-05
35
- - train_batch_size: 16
36
- - eval_batch_size: 16
37
  - seed: 42
38
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
39
  - lr_scheduler_type: linear
@@ -43,5 +43,5 @@ The following hyperparameters were used during training:
43
 
44
  - Transformers 4.36.2
45
  - Pytorch 2.1.0+cu121
46
- - Datasets 2.16.0
47
  - Tokenizers 0.15.0
 
1
  ---
2
+ base_model: facebook/mbart-large-50-many-to-many-mmt
3
  tags:
4
  - generated_from_trainer
5
  model-index:
 
12
 
13
  # sentiment_25_12
14
 
15
+ This model is a fine-tuned version of [facebook/mbart-large-50-many-to-many-mmt](https://huggingface.co/facebook/mbart-large-50-many-to-many-mmt) on an unknown dataset.
16
 
17
  ## Model description
18
 
 
32
 
33
  The following hyperparameters were used during training:
34
  - learning_rate: 1e-05
35
+ - train_batch_size: 8
36
+ - eval_batch_size: 8
37
  - seed: 42
38
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
39
  - lr_scheduler_type: linear
 
43
 
44
  - Transformers 4.36.2
45
  - Pytorch 2.1.0+cu121
46
+ - Datasets 2.16.1
47
  - Tokenizers 0.15.0
generation_config.json CHANGED
@@ -1,8 +1,11 @@
1
  {
2
  "bos_token_id": 0,
3
  "decoder_start_token_id": 2,
 
4
  "eos_token_id": 2,
5
  "forced_eos_token_id": 2,
 
 
6
  "pad_token_id": 1,
7
  "transformers_version": "4.36.2"
8
  }
 
1
  {
2
  "bos_token_id": 0,
3
  "decoder_start_token_id": 2,
4
+ "early_stopping": true,
5
  "eos_token_id": 2,
6
  "forced_eos_token_id": 2,
7
+ "max_length": 200,
8
+ "num_beams": 5,
9
  "pad_token_id": 1,
10
  "transformers_version": "4.36.2"
11
  }