{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199865,
                "file_path": "results/Huggy/Huggy/Huggy-199865.onnx",
                "reward": 3.212613576748332,
                "creation_time": 1689420216.7176747,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199865.pt"
                ]
            },
            {
                "steps": 399941,
                "file_path": "results/Huggy/Huggy/Huggy-399941.onnx",
                "reward": 4.1291929880778,
                "creation_time": 1689420474.4484484,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399941.pt"
                ]
            },
            {
                "steps": 599937,
                "file_path": "results/Huggy/Huggy/Huggy-599937.onnx",
                "reward": 3.4123923306663833,
                "creation_time": 1689420724.152459,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599937.pt"
                ]
            },
            {
                "steps": 799900,
                "file_path": "results/Huggy/Huggy/Huggy-799900.onnx",
                "reward": 3.969625955727434,
                "creation_time": 1689420972.5137959,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799900.pt"
                ]
            },
            {
                "steps": 999904,
                "file_path": "results/Huggy/Huggy/Huggy-999904.onnx",
                "reward": 3.7479896906396033,
                "creation_time": 1689421221.2416952,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999904.pt"
                ]
            },
            {
                "steps": 1199899,
                "file_path": "results/Huggy/Huggy/Huggy-1199899.onnx",
                "reward": 4.096346643838015,
                "creation_time": 1689421471.1593926,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199899.pt"
                ]
            },
            {
                "steps": 1399873,
                "file_path": "results/Huggy/Huggy/Huggy-1399873.onnx",
                "reward": 3.536507981794852,
                "creation_time": 1689421718.8250842,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399873.pt"
                ]
            },
            {
                "steps": 1599907,
                "file_path": "results/Huggy/Huggy/Huggy-1599907.onnx",
                "reward": 3.9444355933962973,
                "creation_time": 1689421962.878871,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599907.pt"
                ]
            },
            {
                "steps": 1799959,
                "file_path": "results/Huggy/Huggy/Huggy-1799959.onnx",
                "reward": 3.912655770421742,
                "creation_time": 1689422212.475553,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799959.pt"
                ]
            },
            {
                "steps": 1999948,
                "file_path": "results/Huggy/Huggy/Huggy-1999948.onnx",
                "reward": 4.029140853180605,
                "creation_time": 1689422459.4360254,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999948.pt"
                ]
            },
            {
                "steps": 2000001,
                "file_path": "results/Huggy/Huggy/Huggy-2000001.onnx",
                "reward": 4.001808265613955,
                "creation_time": 1689422459.5575964,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000001.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000001,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.001808265613955,
            "creation_time": 1689422459.5575964,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000001.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}