ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199897,
                "file_path": "results/Huggy/Huggy/Huggy-199897.onnx",
                "reward": 3.610138582460808,
                "creation_time": 1693946256.5165594,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199897.pt"
                ]
            },
            {
                "steps": 399951,
                "file_path": "results/Huggy/Huggy/Huggy-399951.onnx",
                "reward": 3.6129864553610482,
                "creation_time": 1693946554.0719898,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399951.pt"
                ]
            },
            {
                "steps": 599928,
                "file_path": "results/Huggy/Huggy/Huggy-599928.onnx",
                "reward": 4.502665966749191,
                "creation_time": 1693946854.8326986,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599928.pt"
                ]
            },
            {
                "steps": 799908,
                "file_path": "results/Huggy/Huggy/Huggy-799908.onnx",
                "reward": 4.047160756779227,
                "creation_time": 1693947154.9378738,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799908.pt"
                ]
            },
            {
                "steps": 999986,
                "file_path": "results/Huggy/Huggy/Huggy-999986.onnx",
                "reward": 3.8653096571026078,
                "creation_time": 1693947454.0137563,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999986.pt"
                ]
            },
            {
                "steps": 1199969,
                "file_path": "results/Huggy/Huggy/Huggy-1199969.onnx",
                "reward": 3.8476313047938877,
                "creation_time": 1693947760.8041015,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199969.pt"
                ]
            },
            {
                "steps": 1399970,
                "file_path": "results/Huggy/Huggy/Huggy-1399970.onnx",
                "reward": 4.4526375730832415,
                "creation_time": 1693948068.094869,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399970.pt"
                ]
            },
            {
                "steps": 1599987,
                "file_path": "results/Huggy/Huggy/Huggy-1599987.onnx",
                "reward": 3.921427619396424,
                "creation_time": 1693948369.8029337,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599987.pt"
                ]
            },
            {
                "steps": 1799954,
                "file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
                "reward": 3.7762962968060463,
                "creation_time": 1693948677.2099855,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799954.pt"
                ]
            },
            {
                "steps": 1999936,
                "file_path": "results/Huggy/Huggy/Huggy-1999936.onnx",
                "reward": 3.3003185417341148,
                "creation_time": 1693948984.137168,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999936.pt"
                ]
            },
            {
                "steps": 2000032,
                "file_path": "results/Huggy/Huggy/Huggy-2000032.onnx",
                "reward": 3.3582604083609073,
                "creation_time": 1693948984.3169198,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000032.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000032,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.3582604083609073,
            "creation_time": 1693948984.3169198,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000032.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
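
Note: this is the training_status.json that ML-Agents writes under run_logs/ for the ppo-Huggy run. Each entry in "checkpoints" records the step count at save time, the exported .onnx policy path, the reward logged for that checkpoint, a Unix creation timestamp, and the companion .pt file under "auxillary_file_paths" (the field is spelled this way by ML-Agents itself). Below is a minimal Python sketch for loading the file and comparing checkpoint rewards; the relative path "run_logs/training_status.json" is an assumption about where the file sits in your checkout.

import json

# Minimal sketch: read the training_status.json shown above and print one
# summary line per checkpoint. The relative path below is an assumption;
# adjust it to wherever the file lives locally.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]
for ckpt in huggy["checkpoints"]:
    print(f'steps={ckpt["steps"]:>8}  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = huggy["final_checkpoint"]
print(f'final checkpoint: steps={final["steps"]}  reward={final["reward"]:.3f}  {final["file_path"]}')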