ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199878,
                "file_path": "results/Huggy/Huggy/Huggy-199878.onnx",
                "reward": 3.4312705049912133,
                "creation_time": 1693833622.4148984,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199878.pt"
                ]
            },
            {
                "steps": 399968,
                "file_path": "results/Huggy/Huggy/Huggy-399968.onnx",
                "reward": 4.014387057044289,
                "creation_time": 1693833873.6026783,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399968.pt"
                ]
            },
            {
                "steps": 599864,
                "file_path": "results/Huggy/Huggy/Huggy-599864.onnx",
                "reward": 4.326934230955024,
                "creation_time": 1693834131.1153288,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599864.pt"
                ]
            },
            {
                "steps": 799867,
                "file_path": "results/Huggy/Huggy/Huggy-799867.onnx",
                "reward": 3.679828051084317,
                "creation_time": 1693834380.1746988,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799867.pt"
                ]
            },
            {
                "steps": 999907,
                "file_path": "results/Huggy/Huggy/Huggy-999907.onnx",
                "reward": 3.659368012485833,
                "creation_time": 1693834639.0764484,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999907.pt"
                ]
            },
            {
                "steps": 1199973,
                "file_path": "results/Huggy/Huggy/Huggy-1199973.onnx",
                "reward": 3.762160212363837,
                "creation_time": 1693834894.345694,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199973.pt"
                ]
            },
            {
                "steps": 1399638,
                "file_path": "results/Huggy/Huggy/Huggy-1399638.onnx",
                "reward": 3.7701140959541517,
                "creation_time": 1693835144.1660056,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399638.pt"
                ]
            },
            {
                "steps": 1599988,
                "file_path": "results/Huggy/Huggy/Huggy-1599988.onnx",
                "reward": 3.6060261862904,
                "creation_time": 1693835399.7276964,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599988.pt"
                ]
            },
            {
                "steps": 1799955,
                "file_path": "results/Huggy/Huggy/Huggy-1799955.onnx",
                "reward": 3.6606377650584494,
                "creation_time": 1693835661.5301561,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799955.pt"
                ]
            },
            {
                "steps": 1999998,
                "file_path": "results/Huggy/Huggy/Huggy-1999998.onnx",
                "reward": 3.4120879006940266,
                "creation_time": 1693835921.8060036,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999998.pt"
                ]
            },
            {
                "steps": 2000012,
                "file_path": "results/Huggy/Huggy/Huggy-2000012.onnx",
                "reward": 3.374486896124753,
                "creation_time": 1693835921.934671,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000012.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000012,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.374486896124753,
            "creation_time": 1693835921.934671,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000012.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
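
For reference, a minimal Python sketch of how this status file can be inspected, assuming it is read from run_logs/training_status.json and that "Huggy" is the behavior-name key, as in the listing above:

import json

# Load the ML-Agents training status file (path assumed from the listing above).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name used in this run.
checkpoints = status["Huggy"]["checkpoints"]

# Print steps, mean reward, and ONNX path for each saved checkpoint.
for ckpt in checkpoints:
    print(f'{ckpt["steps"]:>9,} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final checkpoint is the exported policy written at the end of training.
final = status["Huggy"]["final_checkpoint"]
print(f'final: {final["steps"]:,} steps  reward={final["reward"]:.3f}  {final["file_path"]}')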