daekeun-ml's picture
Upload model
3ff8a52
raw
history blame contribute delete
582 Bytes
{
  "architectures": [
    "SimCSEModel"
  ],
  "base_model": "klue/roberta-large",
  "batch_size": 64,
  "dataset_dir": "/opt/ml/input/data/training/",
  "debug": false,
  "device": "cuda:0",
  "eval_steps": 50,
  "local_rank": 0,
  "lr": 3e-05,
  "lr_scheduler_type": "linear",
  "max_seq_len": 32,
  "num_epochs": 3,
  "num_warmup_steps": 0,
  "output_dir": "/opt/ml/model/",
  "rank": 0,
  "save_path": "../model",
  "seed": 42,
  "temperature": 0.05,
  "torch_dtype": "float32",
  "transformers_version": "4.33.3",
  "use_fp16": true,
  "version": 1.0,
  "world_size": 4
}