{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199955,
        "file_path": "results/Huggy/Huggy/Huggy-199955.onnx",
        "reward": 3.4756176359951496,
        "creation_time": 1692181093.9220848,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199955.pt"
        ]
      },
      {
        "steps": 399940,
        "file_path": "results/Huggy/Huggy/Huggy-399940.onnx",
        "reward": 3.804645812686752,
        "creation_time": 1692181338.6402905,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399940.pt"
        ]
      },
      {
        "steps": 599841,
        "file_path": "results/Huggy/Huggy/Huggy-599841.onnx",
        "reward": 3.9530864357948303,
        "creation_time": 1692181584.6579237,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599841.pt"
        ]
      },
      {
        "steps": 799991,
        "file_path": "results/Huggy/Huggy/Huggy-799991.onnx",
        "reward": 4.019764256082187,
        "creation_time": 1692181835.1102452,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799991.pt"
        ]
      },
      {
        "steps": 999937,
        "file_path": "results/Huggy/Huggy/Huggy-999937.onnx",
        "reward": 3.7697907883351243,
        "creation_time": 1692182085.2191947,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999937.pt"
        ]
      },
      {
        "steps": 1199965,
        "file_path": "results/Huggy/Huggy/Huggy-1199965.onnx",
        "reward": 3.704379585481459,
        "creation_time": 1692182336.8372157,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199965.pt"
        ]
      },
      {
        "steps": 1399896,
        "file_path": "results/Huggy/Huggy/Huggy-1399896.onnx",
        "reward": 3.768630704818628,
        "creation_time": 1692182575.663738,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399896.pt"
        ]
      },
      {
        "steps": 1599998,
        "file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
        "reward": 3.8186429262866635,
        "creation_time": 1692182819.3241985,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599998.pt"
        ]
      },
      {
        "steps": 1799949,
        "file_path": "results/Huggy/Huggy/Huggy-1799949.onnx",
        "reward": 4.026723747612328,
        "creation_time": 1692183065.539368,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799949.pt"
        ]
      },
      {
        "steps": 1999287,
        "file_path": "results/Huggy/Huggy/Huggy-1999287.onnx",
        "reward": 3.8626917973160744,
        "creation_time": 1692183307.5366924,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999287.pt"
        ]
      },
      {
        "steps": 2000037,
        "file_path": "results/Huggy/Huggy/Huggy-2000037.onnx",
        "reward": 3.4251081592896404,
        "creation_time": 1692183307.6832604,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000037.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000037,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.4251081592896404,
      "creation_time": 1692183307.6832604,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000037.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}