{
"Huggy": {
"checkpoints": [
{
"steps": 199345,
"file_path": "results/Huggy/Huggy/Huggy-199345.onnx",
"reward": 2.9448174925173745,
"creation_time": 1687331894.6417053,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199345.pt"
]
},
{
"steps": 399916,
"file_path": "results/Huggy/Huggy/Huggy-399916.onnx",
"reward": 3.092244420732771,
"creation_time": 1687332205.0130596,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399916.pt"
]
},
{
"steps": 599684,
"file_path": "results/Huggy/Huggy/Huggy-599684.onnx",
"reward": 3.4586245618961953,
"creation_time": 1687332509.2951677,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599684.pt"
]
},
{
"steps": 799652,
"file_path": "results/Huggy/Huggy/Huggy-799652.onnx",
"reward": 3.71820586631375,
"creation_time": 1687332822.560128,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799652.pt"
]
},
{
"steps": 999938,
"file_path": "results/Huggy/Huggy/Huggy-999938.onnx",
"reward": 3.6049024143689117,
"creation_time": 1687333137.441577,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999938.pt"
]
},
{
"steps": 1199895,
"file_path": "results/Huggy/Huggy/Huggy-1199895.onnx",
"reward": 3.5110867727886546,
"creation_time": 1687333446.0209775,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199895.pt"
]
},
{
"steps": 1399963,
"file_path": "results/Huggy/Huggy/Huggy-1399963.onnx",
"reward": 3.593450170976144,
"creation_time": 1687333745.3359556,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399963.pt"
]
},
{
"steps": 1599954,
"file_path": "results/Huggy/Huggy/Huggy-1599954.onnx",
"reward": 3.899758051548685,
"creation_time": 1687334045.7299774,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599954.pt"
]
},
{
"steps": 1799848,
"file_path": "results/Huggy/Huggy/Huggy-1799848.onnx",
"reward": 3.7045456986678276,
"creation_time": 1687334356.0061338,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799848.pt"
]
},
{
"steps": 1999410,
"file_path": "results/Huggy/Huggy/Huggy-1999410.onnx",
"reward": 3.4642043129393927,
"creation_time": 1687334653.1673493,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999410.pt"
]
},
{
"steps": 2000160,
"file_path": "results/Huggy/Huggy/Huggy-2000160.onnx",
"reward": 3.422062524783066,
"creation_time": 1687334653.4493077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000160.pt"
]
}
],
"final_checkpoint": {
"steps": 2000160,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.422062524783066,
"creation_time": 1687334653.4493077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000160.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}