ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199862,
                "file_path": "results/Huggy/Huggy/Huggy-199862.onnx",
                "reward": 3.169571754565606,
                "creation_time": 1683834741.0672,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199862.pt"
                ]
            },
            {
                "steps": 399872,
                "file_path": "results/Huggy/Huggy/Huggy-399872.onnx",
                "reward": 3.7648260823317936,
                "creation_time": 1683834977.0351796,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399872.pt"
                ]
            },
            {
                "steps": 599946,
                "file_path": "results/Huggy/Huggy/Huggy-599946.onnx",
                "reward": 3.627560636271601,
                "creation_time": 1683835216.807626,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599946.pt"
                ]
            },
            {
                "steps": 799934,
                "file_path": "results/Huggy/Huggy/Huggy-799934.onnx",
                "reward": 3.593174814862549,
                "creation_time": 1683835453.6482246,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799934.pt"
                ]
            },
            {
                "steps": 999948,
                "file_path": "results/Huggy/Huggy/Huggy-999948.onnx",
                "reward": 3.8508136742390118,
                "creation_time": 1683835693.120402,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999948.pt"
                ]
            },
            {
                "steps": 1199915,
                "file_path": "results/Huggy/Huggy/Huggy-1199915.onnx",
                "reward": 4.064860724935345,
                "creation_time": 1683835928.7064316,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199915.pt"
                ]
            },
            {
                "steps": 1399985,
                "file_path": "results/Huggy/Huggy/Huggy-1399985.onnx",
                "reward": 3.6805071796689717,
                "creation_time": 1683836162.8420715,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399985.pt"
                ]
            },
            {
                "steps": 1599954,
                "file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
                "reward": 3.477696364124616,
                "creation_time": 1683836403.9771605,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599954.pt"
                ]
            },
            {
                "steps": 1799483,
                "file_path": "results/Huggy/Huggy/Huggy-1799483.onnx",
                "reward": 3.559863494921334,
                "creation_time": 1683836645.639301,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799483.pt"
                ]
            },
            {
                "steps": 1999996,
                "file_path": "results/Huggy/Huggy/Huggy-1999996.onnx",
                "reward": 3.3777132630348206,
                "creation_time": 1683836886.161217,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999996.pt"
                ]
            },
            {
                "steps": 2000041,
                "file_path": "results/Huggy/Huggy/Huggy-2000041.onnx",
                "reward": 3.425525760650635,
                "creation_time": 1683836886.3541613,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000041.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000041,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.425525760650635,
            "creation_time": 1683836886.3541613,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000041.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
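
A minimal sketch of how this run log could be inspected with the Python standard library. The local path below is an assumption; point it at wherever this training_status.json actually lives.

    import json

    # Assumed local path to this run log (adjust as needed).
    STATUS_PATH = "run_logs/training_status.json"

    with open(STATUS_PATH) as f:
        status = json.load(f)

    # Print each saved checkpoint's step count, mean reward, and exported ONNX path.
    for ckpt in status["Huggy"]["checkpoints"]:
        print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  -> {ckpt['file_path']}")

    # The final exported policy referenced by the run.
    final = status["Huggy"]["final_checkpoint"]
    print("final:", final["file_path"], "reward", round(final["reward"], 3))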