{
"Huggy": {
"checkpoints": [
{
"steps": 199937,
"file_path": "results/Huggy/Huggy/Huggy-199937.onnx",
"reward": 3.4437559275857863,
"creation_time": 1690833623.3767605,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199937.pt"
]
},
{
"steps": 399934,
"file_path": "results/Huggy/Huggy/Huggy-399934.onnx",
"reward": 3.7919011040339394,
"creation_time": 1690833867.9384894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399934.pt"
]
},
{
"steps": 599912,
"file_path": "results/Huggy/Huggy/Huggy-599912.onnx",
"reward": 4.309963878832366,
"creation_time": 1690834111.988718,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599912.pt"
]
},
{
"steps": 799928,
"file_path": "results/Huggy/Huggy/Huggy-799928.onnx",
"reward": 3.915643640318695,
"creation_time": 1690834357.4398022,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799928.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 3.9729285319959087,
"creation_time": 1690834601.3566864,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199984,
"file_path": "results/Huggy/Huggy/Huggy-1199984.onnx",
"reward": 4.172231330147272,
"creation_time": 1690834848.1661103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199984.pt"
]
},
{
"steps": 1399924,
"file_path": "results/Huggy/Huggy/Huggy-1399924.onnx",
"reward": 3.815022040830284,
"creation_time": 1690835097.2272737,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399924.pt"
]
},
{
"steps": 1599606,
"file_path": "results/Huggy/Huggy/Huggy-1599606.onnx",
"reward": 3.912286330839831,
"creation_time": 1690835349.1696305,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599606.pt"
]
},
{
"steps": 1799992,
"file_path": "results/Huggy/Huggy/Huggy-1799992.onnx",
"reward": 3.719263095408678,
"creation_time": 1690835604.6283512,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799992.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy/Huggy/Huggy-1999999.onnx",
"reward": 3.821419987736679,
"creation_time": 1690835864.5756783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000104,
"file_path": "results/Huggy/Huggy/Huggy-2000104.onnx",
"reward": 3.866991331179937,
"creation_time": 1690835864.70064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000104.pt"
]
}
],
"final_checkpoint": {
"steps": 2000104,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.866991331179937,
"creation_time": 1690835864.70064,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000104.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}