{
"Huggy": {
"checkpoints": [
{
"steps": 199885,
"file_path": "results/Huggy/Huggy/Huggy-199885.onnx",
"reward": 3.8897251204440466,
"creation_time": 1690250678.243402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199885.pt"
]
},
{
"steps": 399992,
"file_path": "results/Huggy/Huggy/Huggy-399992.onnx",
"reward": 3.2304334261932888,
"creation_time": 1690250922.2604651,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399992.pt"
]
},
{
"steps": 599920,
"file_path": "results/Huggy/Huggy/Huggy-599920.onnx",
"reward": 4.660303890705109,
"creation_time": 1690251173.2705364,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599920.pt"
]
},
{
"steps": 799879,
"file_path": "results/Huggy/Huggy/Huggy-799879.onnx",
"reward": 3.945044503893171,
"creation_time": 1690251418.4022274,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799879.pt"
]
},
{
"steps": 999984,
"file_path": "results/Huggy/Huggy/Huggy-999984.onnx",
"reward": 3.666853402100556,
"creation_time": 1690251667.1055484,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999984.pt"
]
},
{
"steps": 1199954,
"file_path": "results/Huggy/Huggy/Huggy-1199954.onnx",
"reward": 3.6102915468968844,
"creation_time": 1690251917.919275,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199954.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy/Huggy/Huggy-1399991.onnx",
"reward": 3.6139038312129483,
"creation_time": 1690252163.7455244,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599903,
"file_path": "results/Huggy/Huggy/Huggy-1599903.onnx",
"reward": 3.904216895086898,
"creation_time": 1690252412.491574,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599903.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 4.263387214342753,
"creation_time": 1690252659.3849127,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999913,
"file_path": "results/Huggy/Huggy/Huggy-1999913.onnx",
"reward": 4.41541954718138,
"creation_time": 1690252907.2727973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999913.pt"
]
},
{
"steps": 2000195,
"file_path": "results/Huggy/Huggy/Huggy-2000195.onnx",
"reward": 4.482057875394821,
"creation_time": 1690252907.4784179,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000195.pt"
]
}
],
"final_checkpoint": {
"steps": 2000195,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.482057875394821,
"creation_time": 1690252907.4784179,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000195.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}