{
"Huggy": {
"checkpoints": [
{
"steps": 199813,
"file_path": "results/Huggy/Huggy/Huggy-199813.onnx",
"reward": 3.3652980535749406,
"creation_time": 1690225731.9607854,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199813.pt"
]
},
{
"steps": 399938,
"file_path": "results/Huggy/Huggy/Huggy-399938.onnx",
"reward": 3.5870629445366236,
"creation_time": 1690225996.6579437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399938.pt"
]
},
{
"steps": 599942,
"file_path": "results/Huggy/Huggy/Huggy-599942.onnx",
"reward": 3.79313148856163,
"creation_time": 1690226262.6563256,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599942.pt"
]
},
{
"steps": 799965,
"file_path": "results/Huggy/Huggy/Huggy-799965.onnx",
"reward": 3.8141127623976234,
"creation_time": 1690226519.5264883,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799965.pt"
]
},
{
"steps": 999954,
"file_path": "results/Huggy/Huggy/Huggy-999954.onnx",
"reward": 3.8990817768651933,
"creation_time": 1690226777.2279189,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999954.pt"
]
},
{
"steps": 1199933,
"file_path": "results/Huggy/Huggy/Huggy-1199933.onnx",
"reward": 4.039097522988039,
"creation_time": 1690227037.7918482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199933.pt"
]
},
{
"steps": 1399979,
"file_path": "results/Huggy/Huggy/Huggy-1399979.onnx",
"reward": 4.477470779418946,
"creation_time": 1690227301.8615134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399979.pt"
]
},
{
"steps": 1599972,
"file_path": "results/Huggy/Huggy/Huggy-1599972.onnx",
"reward": 3.7652458691158177,
"creation_time": 1690227565.284869,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599972.pt"
]
},
{
"steps": 1799648,
"file_path": "results/Huggy/Huggy/Huggy-1799648.onnx",
"reward": 3.5367288012658396,
"creation_time": 1690227835.9660237,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799648.pt"
]
},
{
"steps": 1999942,
"file_path": "results/Huggy/Huggy/Huggy-1999942.onnx",
"reward": 3.209076789709238,
"creation_time": 1690228098.8615155,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999942.pt"
]
},
{
"steps": 2000034,
"file_path": "results/Huggy/Huggy/Huggy-2000034.onnx",
"reward": 3.491362316267831,
"creation_time": 1690228099.024067,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000034.pt"
]
}
],
"final_checkpoint": {
"steps": 2000034,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.491362316267831,
"creation_time": 1690228099.024067,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000034.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}