{
"Huggy": {
"checkpoints": [
{
"steps": 199863,
"file_path": "results/Huggy/Huggy/Huggy-199863.onnx",
"reward": 3.3841433139408337,
"creation_time": 1690700607.7506068,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199863.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 4.01371475004814,
"creation_time": 1690700846.991179,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599983,
"file_path": "results/Huggy/Huggy/Huggy-599983.onnx",
"reward": 3.941960025716711,
"creation_time": 1690701088.6022332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599983.pt"
]
},
{
"steps": 799863,
"file_path": "results/Huggy/Huggy/Huggy-799863.onnx",
"reward": 3.8306134566282615,
"creation_time": 1690701327.4866726,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799863.pt"
]
},
{
"steps": 999999,
"file_path": "results/Huggy/Huggy/Huggy-999999.onnx",
"reward": 3.805561321509349,
"creation_time": 1690701572.0473516,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999999.pt"
]
},
{
"steps": 1199962,
"file_path": "results/Huggy/Huggy/Huggy-1199962.onnx",
"reward": 3.606831595079223,
"creation_time": 1690701816.5325158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199962.pt"
]
},
{
"steps": 1399906,
"file_path": "results/Huggy/Huggy/Huggy-1399906.onnx",
"reward": 4.59060980214013,
"creation_time": 1690702061.9999366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399906.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
"reward": 3.8515543752575514,
"creation_time": 1690702299.5450852,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 3.7853708825212844,
"creation_time": 1690702546.3649757,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999972,
"file_path": "results/Huggy/Huggy/Huggy-1999972.onnx",
"reward": 3.055641395705087,
"creation_time": 1690702793.114514,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999972.pt"
]
},
{
"steps": 2000031,
"file_path": "results/Huggy/Huggy/Huggy-2000031.onnx",
"reward": 3.130187749862671,
"creation_time": 1690702793.2342205,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000031.pt"
]
}
],
"final_checkpoint": {
"steps": 2000031,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.130187749862671,
"creation_time": 1690702793.2342205,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000031.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}