{
"Huggy": {
"checkpoints": [
{
"steps": 199893,
"file_path": "results/Huggy2/Huggy/Huggy-199893.onnx",
"reward": 3.4187544425328573,
"creation_time": 1712403743.9582808,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199893.pt"
]
},
{
"steps": 399985,
"file_path": "results/Huggy2/Huggy/Huggy-399985.onnx",
"reward": 3.9839718063672382,
"creation_time": 1712403994.0006962,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399985.pt"
]
},
{
"steps": 599368,
"file_path": "results/Huggy2/Huggy/Huggy-599368.onnx",
"reward": 4.69471387565136,
"creation_time": 1712404246.946806,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599368.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy2/Huggy/Huggy-799968.onnx",
"reward": 3.638436005890735,
"creation_time": 1712404501.5252237,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999907,
"file_path": "results/Huggy2/Huggy/Huggy-999907.onnx",
"reward": 3.8742815824865384,
"creation_time": 1712404760.5681543,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999907.pt"
]
},
{
"steps": 1199977,
"file_path": "results/Huggy2/Huggy/Huggy-1199977.onnx",
"reward": 4.116980135440826,
"creation_time": 1712405020.679762,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199977.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy2/Huggy/Huggy-1399991.onnx",
"reward": 3.851267882864526,
"creation_time": 1712405277.2499921,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599938,
"file_path": "results/Huggy2/Huggy/Huggy-1599938.onnx",
"reward": 3.793461036828398,
"creation_time": 1712405541.3523607,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599938.pt"
]
},
{
"steps": 1799947,
"file_path": "results/Huggy2/Huggy/Huggy-1799947.onnx",
"reward": 3.6661313881999567,
"creation_time": 1712405799.1661663,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799947.pt"
]
},
{
"steps": 1999914,
"file_path": "results/Huggy2/Huggy/Huggy-1999914.onnx",
"reward": 3.07423893143149,
"creation_time": 1712406058.7154973,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999914.pt"
]
},
{
"steps": 2000664,
"file_path": "results/Huggy2/Huggy/Huggy-2000664.onnx",
"reward": 2.824823339780172,
"creation_time": 1712406059.1141603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000664.pt"
]
}
],
"final_checkpoint": {
"steps": 2000664,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.824823339780172,
"creation_time": 1712406059.1141603,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000664.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}