{
"Huggy": {
"checkpoints": [
{
"steps": 199958,
"file_path": "results/Huggy/Huggy/Huggy-199958.onnx",
"reward": 3.587126120444267,
"creation_time": 1689868951.4545321,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199958.pt"
]
},
{
"steps": 399884,
"file_path": "results/Huggy/Huggy/Huggy-399884.onnx",
"reward": 3.7172725640810453,
"creation_time": 1689869211.403046,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399884.pt"
]
},
{
"steps": 599975,
"file_path": "results/Huggy/Huggy/Huggy-599975.onnx",
"reward": 4.088218867778778,
"creation_time": 1689869478.850018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599975.pt"
]
},
{
"steps": 799933,
"file_path": "results/Huggy/Huggy/Huggy-799933.onnx",
"reward": 3.998140384956282,
"creation_time": 1689869744.860215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799933.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 4.220335966729103,
"creation_time": 1689870011.7217224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199944,
"file_path": "results/Huggy/Huggy/Huggy-1199944.onnx",
"reward": 4.0221078081797526,
"creation_time": 1689870281.739715,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199944.pt"
]
},
{
"steps": 1399746,
"file_path": "results/Huggy/Huggy/Huggy-1399746.onnx",
"reward": 3.5717679762071177,
"creation_time": 1689870547.7449253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399746.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy/Huggy/Huggy-1599953.onnx",
"reward": 3.8646079463083387,
"creation_time": 1689870806.6242511,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799993,
"file_path": "results/Huggy/Huggy/Huggy-1799993.onnx",
"reward": 3.8087546257522162,
"creation_time": 1689871068.051962,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799993.pt"
]
},
{
"steps": 1999982,
"file_path": "results/Huggy/Huggy/Huggy-1999982.onnx",
"reward": 3.8218713628523275,
"creation_time": 1689871334.418976,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999982.pt"
]
},
{
"steps": 2000081,
"file_path": "results/Huggy/Huggy/Huggy-2000081.onnx",
"reward": 3.845395532116961,
"creation_time": 1689871334.5567615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000081.pt"
]
}
],
"final_checkpoint": {
"steps": 2000081,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.845395532116961,
"creation_time": 1689871334.5567615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000081.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
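
# Note (not part of the JSON above): this is the checkpoint log that ML-Agents
# writes to run_logs/training_status.json during training. A minimal sketch of
# how one might read it with the Python standard library follows; the relative
# path and the behavior name "Huggy" are taken from the file itself, everything
# else is an assumption for illustration.

import json

# Load the training status file produced by the run.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Print each saved checkpoint with its step count and mean reward.
for ckpt in behavior["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The final exported policy (results/Huggy/Huggy.onnx in this run).
final = behavior["final_checkpoint"]
print("final:", final["file_path"], "reward:", round(final["reward"], 3))

# If you want the best intermediate policy rather than the last one,
# pick the checkpoint with the highest recorded reward.
best = max(behavior["checkpoints"], key=lambda c: c["reward"])
print("best:", best["file_path"], "reward:", round(best["reward"], 3))

# In this log the highest-reward checkpoint is the one at 999978 steps
# (reward ~4.220), while the final export at 2000081 steps sits at ~3.845.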