{
"Huggy": {
"checkpoints": [
{
"steps": 2599842,
"file_path": "results/Huggy/Huggy/Huggy-2599842.onnx",
"reward": 3.6886503630214267,
"creation_time": 1687612370.3622904,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2599842.pt"
]
},
{
"steps": 2799961,
"file_path": "results/Huggy/Huggy/Huggy-2799961.onnx",
"reward": 3.808251648421449,
"creation_time": 1687612606.1486096,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2799961.pt"
]
},
{
"steps": 2999952,
"file_path": "results/Huggy/Huggy/Huggy-2999952.onnx",
"reward": 3.9011522956558915,
"creation_time": 1687612843.1417572,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2999952.pt"
]
},
{
"steps": 3199963,
"file_path": "results/Huggy/Huggy/Huggy-3199963.onnx",
"reward": 3.8112009167671204,
"creation_time": 1687613085.2522066,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3199963.pt"
]
},
{
"steps": 3399973,
"file_path": "results/Huggy/Huggy/Huggy-3399973.onnx",
"reward": 3.7111746007449007,
"creation_time": 1687613329.705403,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3399973.pt"
]
},
{
"steps": 3599918,
"file_path": "results/Huggy/Huggy/Huggy-3599918.onnx",
"reward": 3.8578032648397818,
"creation_time": 1687613566.8007278,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3599918.pt"
]
},
{
"steps": 3799970,
"file_path": "results/Huggy/Huggy/Huggy-3799970.onnx",
"reward": 3.7363014698028563,
"creation_time": 1687613817.8682225,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3799970.pt"
]
},
{
"steps": 3999411,
"file_path": "results/Huggy/Huggy/Huggy-3999411.onnx",
"reward": 3.617385478841292,
"creation_time": 1687614051.737577,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3999411.pt"
]
},
{
"steps": 3999975,
"file_path": "results/Huggy/Huggy/Huggy-3999975.onnx",
"reward": 2.8365768909454347,
"creation_time": 1687614211.6796942,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-3999975.pt"
]
},
{
"steps": 4199917,
"file_path": "results/Huggy/Huggy/Huggy-4199917.onnx",
"reward": 3.9757096196564152,
"creation_time": 1687614445.8863084,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-4199917.pt"
]
},
{
"steps": 4399810,
"file_path": "results/Huggy/Huggy/Huggy-4399810.onnx",
"reward": 3.813748516355242,
"creation_time": 1687614682.233683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-4399810.pt"
]
},
{
"steps": 4599958,
"file_path": "results/Huggy/Huggy/Huggy-4599958.onnx",
"reward": 4.114492069591176,
"creation_time": 1687614926.5918233,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-4599958.pt"
]
},
{
"steps": 4799991,
"file_path": "results/Huggy/Huggy/Huggy-4799991.onnx",
"reward": 3.815932668007172,
"creation_time": 1687615161.9041271,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-4799991.pt"
]
},
{
"steps": 4999998,
"file_path": "results/Huggy/Huggy/Huggy-4999998.onnx",
"reward": 4.103228949175941,
"creation_time": 1687615396.2099924,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-4999998.pt"
]
},
{
"steps": 5000073,
"file_path": "results/Huggy/Huggy/Huggy-5000073.onnx",
"reward": 4.112955032464336,
"creation_time": 1687615396.4024131,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-5000073.pt"
]
}
],
"final_checkpoint": {
"steps": 5000073,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.112955032464336,
"creation_time": 1687615396.4024131,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-5000073.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}