{
"Huggy": {
"checkpoints": [
{
"steps": 199880,
"file_path": "results/Huggy/Huggy/Huggy-199880.onnx",
"reward": 3.289783469835917,
"creation_time": 1707938826.2495482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199880.pt"
]
},
{
"steps": 399832,
"file_path": "results/Huggy/Huggy/Huggy-399832.onnx",
"reward": 3.749866609479867,
"creation_time": 1707939069.8012047,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399832.pt"
]
},
{
"steps": 599970,
"file_path": "results/Huggy/Huggy/Huggy-599970.onnx",
"reward": 3.1132898330688477,
"creation_time": 1707939316.7675023,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599970.pt"
]
},
{
"steps": 799983,
"file_path": "results/Huggy/Huggy/Huggy-799983.onnx",
"reward": 3.8535269340338734,
"creation_time": 1707939558.4894702,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799983.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy/Huggy/Huggy-999950.onnx",
"reward": 3.7465406723111587,
"creation_time": 1707939807.6299536,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199902,
"file_path": "results/Huggy/Huggy/Huggy-1199902.onnx",
"reward": 3.963053560256958,
"creation_time": 1707940066.5313678,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199902.pt"
]
},
{
"steps": 1399986,
"file_path": "results/Huggy/Huggy/Huggy-1399986.onnx",
"reward": 3.6998505223365057,
"creation_time": 1707940317.4300346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399986.pt"
]
},
{
"steps": 1599962,
"file_path": "results/Huggy/Huggy/Huggy-1599962.onnx",
"reward": 3.703893771512168,
"creation_time": 1707940572.3274562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599962.pt"
]
},
{
"steps": 1799932,
"file_path": "results/Huggy/Huggy/Huggy-1799932.onnx",
"reward": 3.826379196984427,
"creation_time": 1707940826.7413526,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799932.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy/Huggy/Huggy-1999999.onnx",
"reward": 3.5946776089460952,
"creation_time": 1707941080.758798,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000116,
"file_path": "results/Huggy/Huggy/Huggy-2000116.onnx",
"reward": 3.6065225352843604,
"creation_time": 1707941080.941322,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000116.pt"
]
}
],
"final_checkpoint": {
"steps": 2000116,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6065225352843604,
"creation_time": 1707941080.941322,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000116.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.0+cu121"
}
}