{
"Huggy": {
"checkpoints": [
{
"steps": 199796,
"file_path": "results/Huggy/Huggy/Huggy-199796.onnx",
"reward": 3.49924184407218,
"creation_time": 1688689426.3103032,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199796.pt"
]
},
{
"steps": 399903,
"file_path": "results/Huggy/Huggy/Huggy-399903.onnx",
"reward": 3.4477861566820005,
"creation_time": 1688689680.2011397,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399903.pt"
]
},
{
"steps": 599946,
"file_path": "results/Huggy/Huggy/Huggy-599946.onnx",
"reward": 2.849509690489088,
"creation_time": 1688689932.5953186,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599946.pt"
]
},
{
"steps": 799985,
"file_path": "results/Huggy/Huggy/Huggy-799985.onnx",
"reward": 4.144954915153248,
"creation_time": 1688690184.4720945,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799985.pt"
]
},
{
"steps": 999972,
"file_path": "results/Huggy/Huggy/Huggy-999972.onnx",
"reward": 3.9491665197985015,
"creation_time": 1688690444.614472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999972.pt"
]
},
{
"steps": 1199927,
"file_path": "results/Huggy/Huggy/Huggy-1199927.onnx",
"reward": 3.8060619595803713,
"creation_time": 1688690709.7465162,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199927.pt"
]
},
{
"steps": 1399927,
"file_path": "results/Huggy/Huggy/Huggy-1399927.onnx",
"reward": 3.896573841571808,
"creation_time": 1688690963.5501745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399927.pt"
]
},
{
"steps": 1599683,
"file_path": "results/Huggy/Huggy/Huggy-1599683.onnx",
"reward": 3.8789568727399097,
"creation_time": 1688691211.9391398,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599683.pt"
]
},
{
"steps": 1799399,
"file_path": "results/Huggy/Huggy/Huggy-1799399.onnx",
"reward": 3.788160323094921,
"creation_time": 1688691467.9236834,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799399.pt"
]
},
{
"steps": 1999873,
"file_path": "results/Huggy/Huggy/Huggy-1999873.onnx",
"reward": 3.922549657523632,
"creation_time": 1688691722.8778934,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999873.pt"
]
},
{
"steps": 2000002,
"file_path": "results/Huggy/Huggy/Huggy-2000002.onnx",
"reward": 3.953429321853482,
"creation_time": 1688691723.0125983,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
],
"final_checkpoint": {
"steps": 2000002,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.953429321853482,
"creation_time": 1688691723.0125983,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
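
For reference, a minimal sketch of inspecting this file with Python's standard json module. The run_logs/training_status.json path is an assumption about where the file sits relative to the working directory; adjust as needed. Note that "auxillary_file_paths" is the spelling ML-Agents writes into the file, so code must use that key verbatim.

import json

# Load the ML-Agents training status produced alongside this Huggy run.
# Path is an assumption; point it at wherever run_logs/ lives in your setup.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each behavior (here "Huggy") records a checkpoint history plus a
# final_checkpoint entry with the same shape.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]} steps  reward={final["reward"]:.3f}  {final["file_path"]}')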