{
"Huggy": {
"checkpoints": [
{
"steps": 199943,
"file_path": "results/Huggy/Huggy/Huggy-199943.onnx",
"reward": 3.408896161549127,
"creation_time": 1690748728.118031,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199943.pt"
]
},
{
"steps": 399882,
"file_path": "results/Huggy/Huggy/Huggy-399882.onnx",
"reward": 4.109834141650443,
"creation_time": 1690749002.6825218,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399882.pt"
]
},
{
"steps": 599959,
"file_path": "results/Huggy/Huggy/Huggy-599959.onnx",
"reward": 3.4798481085083703,
"creation_time": 1690749281.0915453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599959.pt"
]
},
{
"steps": 799739,
"file_path": "results/Huggy/Huggy/Huggy-799739.onnx",
"reward": 4.284583362532251,
"creation_time": 1690749554.5733583,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799739.pt"
]
},
{
"steps": 999956,
"file_path": "results/Huggy/Huggy/Huggy-999956.onnx",
"reward": 3.7055131218571593,
"creation_time": 1690749837.639077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999956.pt"
]
},
{
"steps": 1199974,
"file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
"reward": 3.4807224860114436,
"creation_time": 1690750115.4515555,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199974.pt"
]
},
{
"steps": 1399844,
"file_path": "results/Huggy/Huggy/Huggy-1399844.onnx",
"reward": 3.7785990203299176,
"creation_time": 1690750387.5258844,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399844.pt"
]
},
{
"steps": 1599925,
"file_path": "results/Huggy/Huggy/Huggy-1599925.onnx",
"reward": 3.918006560695705,
"creation_time": 1690750671.0321891,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599925.pt"
]
},
{
"steps": 1799845,
"file_path": "results/Huggy/Huggy/Huggy-1799845.onnx",
"reward": 3.524535841720049,
"creation_time": 1690750950.9678152,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799845.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 3.703991413116455,
"creation_time": 1690751229.6554399,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000038,
"file_path": "results/Huggy/Huggy/Huggy-2000038.onnx",
"reward": 3.676140626271566,
"creation_time": 1690751229.7880812,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
],
"final_checkpoint": {
"steps": 2000038,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.676140626271566,
"creation_time": 1690751229.7880812,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}