{
"best_metric": 0.3957532686340496,
"best_model_checkpoint": "bart-base-lora/checkpoint-160",
"epoch": 0.995334370139969,
"eval_steps": 500,
"global_step": 160,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 1.0,
"eval_accuracy": 0.715724244771495,
"eval_f1_macro": 0.3957532686340496,
"eval_f1_micro": 0.715724244771495,
"eval_f1_weighted": 0.6834645556416842,
"eval_loss": 0.9525313973426819,
"eval_macro_fpr": 0.028501626692807605,
"eval_macro_sensitivity": 0.4415906678049911,
"eval_macro_specificity": 0.9786637894661117,
"eval_precision": 0.6787596592263883,
"eval_precision_macro": 0.38751339695744513,
"eval_recall": 0.715724244771495,
"eval_recall_macro": 0.4415906678049911,
"eval_runtime": 120.9594,
"eval_samples_per_second": 10.673,
"eval_steps_per_second": 1.339,
"eval_weighted_fpr": 0.027587762158911525,
"eval_weighted_sensitivity": 0.715724244771495,
"eval_weighted_specificity": 0.9642325972201805,
"step": 160
}
],
"logging_steps": 500,
"max_steps": 2400,
"num_train_epochs": 15,
"save_steps": 500,
"total_flos": 5721423736154112.0,
"trial_name": null,
"trial_params": null
}