{
"Huggy": {
"checkpoints": [
{
"steps": 199969,
"file_path": "results/Huggy/Huggy/Huggy-199969.onnx",
"reward": 3.6478084146030367,
"creation_time": 1694197053.527555,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199969.pt"
]
},
{
"steps": 399980,
"file_path": "results/Huggy/Huggy/Huggy-399980.onnx",
"reward": 3.5818641570306595,
"creation_time": 1694197304.1682706,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399980.pt"
]
},
{
"steps": 599918,
"file_path": "results/Huggy/Huggy/Huggy-599918.onnx",
"reward": 3.773340082168579,
"creation_time": 1694197555.867237,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599918.pt"
]
},
{
"steps": 799949,
"file_path": "results/Huggy/Huggy/Huggy-799949.onnx",
"reward": 3.7247566385515807,
"creation_time": 1694197797.7741492,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799949.pt"
]
},
{
"steps": 999981,
"file_path": "results/Huggy/Huggy/Huggy-999981.onnx",
"reward": 4.151943672483212,
"creation_time": 1694198049.2817645,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999981.pt"
]
},
{
"steps": 1199995,
"file_path": "results/Huggy/Huggy/Huggy-1199995.onnx",
"reward": 3.4784832994143167,
"creation_time": 1694198298.5522938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199995.pt"
]
},
{
"steps": 1399271,
"file_path": "results/Huggy/Huggy/Huggy-1399271.onnx",
"reward": 3.8581538514598557,
"creation_time": 1694198545.1872776,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399271.pt"
]
},
{
"steps": 1599922,
"file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
"reward": 3.898691962855731,
"creation_time": 1694198807.1337082,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599922.pt"
]
},
{
"steps": 1799977,
"file_path": "results/Huggy/Huggy/Huggy-1799977.onnx",
"reward": 3.417478993771568,
"creation_time": 1694199062.8700252,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799977.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 1.884474277496338,
"creation_time": 1694199318.8165276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000010,
"file_path": "results/Huggy/Huggy/Huggy-2000010.onnx",
"reward": 2.245314598083496,
"creation_time": 1694199318.938865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000010.pt"
]
}
],
"final_checkpoint": {
"steps": 2000010,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.245314598083496,
"creation_time": 1694199318.938865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000010.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}