ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199926,
                "file_path": "results/Huggy/Huggy/Huggy-199926.onnx",
                "reward": 3.3976851909879655,
                "creation_time": 1687670877.417915,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199926.pt"
                ]
            },
            {
                "steps": 399978,
                "file_path": "results/Huggy/Huggy/Huggy-399978.onnx",
                "reward": 3.8091644877972812,
                "creation_time": 1687671123.3741755,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399978.pt"
                ]
            },
            {
                "steps": 599889,
                "file_path": "results/Huggy/Huggy/Huggy-599889.onnx",
                "reward": 4.156521755456924,
                "creation_time": 1687671374.3205962,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599889.pt"
                ]
            },
            {
                "steps": 799964,
                "file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
                "reward": 3.810739969755236,
                "creation_time": 1687671623.9651098,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799964.pt"
                ]
            },
            {
                "steps": 999651,
                "file_path": "results/Huggy/Huggy/Huggy-999651.onnx",
                "reward": 4.086936276930349,
                "creation_time": 1687671877.0949085,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999651.pt"
                ]
            },
            {
                "steps": 1199985,
                "file_path": "results/Huggy/Huggy/Huggy-1199985.onnx",
                "reward": 3.8457044191556435,
                "creation_time": 1687672133.620648,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199985.pt"
                ]
            },
            {
                "steps": 1399891,
                "file_path": "results/Huggy/Huggy/Huggy-1399891.onnx",
                "reward": null,
                "creation_time": 1687672389.905493,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399891.pt"
                ]
            },
            {
                "steps": 1599996,
                "file_path": "results/Huggy/Huggy/Huggy-1599996.onnx",
                "reward": 3.949371148088125,
                "creation_time": 1687672641.6278377,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599996.pt"
                ]
            },
            {
                "steps": 1799995,
                "file_path": "results/Huggy/Huggy/Huggy-1799995.onnx",
                "reward": 3.8881374133414908,
                "creation_time": 1687672895.7873473,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799995.pt"
                ]
            },
            {
                "steps": 1999908,
                "file_path": "results/Huggy/Huggy/Huggy-1999908.onnx",
                "reward": 3.941927965297255,
                "creation_time": 1687673151.7192645,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999908.pt"
                ]
            },
            {
                "steps": 2000033,
                "file_path": "results/Huggy/Huggy/Huggy-2000033.onnx",
                "reward": 4.018481558019465,
                "creation_time": 1687673151.9097571,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000033.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000033,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.018481558019465,
            "creation_time": 1687673151.9097571,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000033.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}