wmt21-comet-qe-da / hparams.yaml
nr_frozen_epochs: 0.3               # encoder kept frozen for the first 0.3 epochs of training
keep_embeddings_frozen: true        # encoder embedding layer stays frozen throughout training
optimizer: AdamW
encoder_learning_rate: 1.0e-05      # learning rate for the pretrained encoder parameters
learning_rate: 3.1e-05              # learning rate for the estimator (top) layers
layerwise_decay: 0.95               # layer-wise learning-rate decay across encoder layers
encoder_model: XLM-RoBERTa
pretrained_model: xlm-roberta-large # Hugging Face checkpoint used to initialise the encoder
pool: avg                           # sentence embedding = average over token embeddings
layer: mix                          # learned weighted mix of all encoder layers
dropout: 0.15                       # dropout applied in the estimator head
batch_size: 4
class_identifier: referenceless_regression_metric  # reference-free (QE) regression model
train_data: data/scores-1520.csv
validation_data: data/mqm.test.z_score.csv
hidden_sizes:                       # hidden layer sizes of the feed-forward estimator
- 2048
- 1024
activations: Tanh                   # activation used in the feed-forward estimator
load_weights_from_checkpoint: null  # no pre-existing checkpoint weights are loaded
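
The class_identifier above marks this checkpoint as a reference-free (quality-estimation) regression metric, so it scores source–translation pairs without a reference. The following is a minimal usage sketch, assuming the unbabel-comet Python package and its download_model / load_from_checkpoint helpers; the model identifier string, example sentences, and batch size are illustrative, and the exact shape of the returned predictions differs between comet releases.

# Minimal sketch: scoring source-MT pairs with a COMET-QE checkpoint.
# Assumes `pip install unbabel-comet`; model name and example data are illustrative.
from comet import download_model, load_from_checkpoint

# Fetch the checkpoint (weights plus the hparams.yaml shown above) and load it.
model_path = download_model("wmt21-comet-qe-da")
model = load_from_checkpoint(model_path)

# Reference-free input: only "src" and "mt" fields, matching
# class_identifier: referenceless_regression_metric.
data = [
    {"src": "Der Hund bellt.", "mt": "The dog is barking."},
    {"src": "Der Hund bellt.", "mt": "The cat sleeps."},
]

# predict() returns segment-level scores and a corpus-level average;
# the concrete return type varies across comet versions, so just print it here.
output = model.predict(data, batch_size=8, gpus=0)
print(output)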