ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199993,
"file_path": "results/Huggy/Huggy/Huggy-199993.onnx",
"reward": 3.4207649571555003,
"creation_time": 1692610536.384306,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199993.pt"
]
},
{
"steps": 399904,
"file_path": "results/Huggy/Huggy/Huggy-399904.onnx",
"reward": 4.068756348844888,
"creation_time": 1692610810.8890727,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399904.pt"
]
},
{
"steps": 599302,
"file_path": "results/Huggy/Huggy/Huggy-599302.onnx",
"reward": 3.835407221317291,
"creation_time": 1692611085.6545615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599302.pt"
]
},
{
"steps": 799467,
"file_path": "results/Huggy/Huggy/Huggy-799467.onnx",
"reward": 3.819728610529141,
"creation_time": 1692611360.8024087,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799467.pt"
]
},
{
"steps": 999966,
"file_path": "results/Huggy/Huggy/Huggy-999966.onnx",
"reward": 3.6258226245168657,
"creation_time": 1692611640.1786928,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999966.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
"reward": 3.6786878074424854,
"creation_time": 1692611918.217797,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399681,
"file_path": "results/Huggy/Huggy/Huggy-1399681.onnx",
"reward": 3.6441612652919257,
"creation_time": 1692612192.2685266,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399681.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 3.8977226603489656,
"creation_time": 1692612470.850136,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799878,
"file_path": "results/Huggy/Huggy/Huggy-1799878.onnx",
"reward": 3.672580973132626,
"creation_time": 1692612749.3258104,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799878.pt"
]
},
{
"steps": 1999885,
"file_path": "results/Huggy/Huggy/Huggy-1999885.onnx",
"reward": 3.9822731812795005,
"creation_time": 1692613028.322552,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999885.pt"
]
},
{
"steps": 2000635,
"file_path": "results/Huggy/Huggy/Huggy-2000635.onnx",
"reward": 3.707815017700195,
"creation_time": 1692613028.4854724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000635.pt"
]
}
],
"final_checkpoint": {
"steps": 2000635,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.707815017700195,
"creation_time": 1692613028.4854724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000635.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
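
This file is the checkpoint log that ML-Agents writes during PPO training: one entry per intermediate .onnx snapshot, plus the final exported model and the toolkit versions used. As a minimal sketch of how you might consume it, the Python below loads the JSON and summarizes each checkpoint's step count, mean reward, and creation time, then picks the highest-reward snapshot. The relative path "run_logs/training_status.json" is an assumption about where the script is run from; the keys ("Huggy", "checkpoints", "steps", "reward", "creation_time", "file_path") are taken from the file above.

import json
from datetime import datetime, timezone

# Assumed location: the script runs from the repo root, where the log
# sits at run_logs/training_status.json (see the path at the top).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used throughout this log.
checkpoints = status["Huggy"]["checkpoints"]

for ckpt in checkpoints:
    # creation_time is a Unix timestamp (seconds, with a fractional part).
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"{ckpt['steps']:>8} steps  "
          f"reward={ckpt['reward']:.3f}  "
          f"{created:%Y-%m-%d %H:%M:%S} UTC")

# Highest mean reward among the intermediate snapshots; note this need
# not be the final checkpoint (here it is Huggy-399904, reward ~4.069).
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best: {best['file_path']} (reward {best['reward']:.3f})")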