{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199620,
        "file_path": "results/Huggy/Huggy/Huggy-199620.onnx",
        "reward": 3.232119880025349,
        "creation_time": 1686585166.5010011,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199620.pt"
        ]
      },
      {
        "steps": 399977,
        "file_path": "results/Huggy/Huggy/Huggy-399977.onnx",
        "reward": 3.3108050073346784,
        "creation_time": 1686585441.3587823,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399977.pt"
        ]
      },
      {
        "steps": 599966,
        "file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
        "reward": 3.499898388981819,
        "creation_time": 1686585717.9110851,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599966.pt"
        ]
      },
      {
        "steps": 799493,
        "file_path": "results/Huggy/Huggy/Huggy-799493.onnx",
        "reward": 3.829534178010879,
        "creation_time": 1686585991.477785,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799493.pt"
        ]
      },
      {
        "steps": 999908,
        "file_path": "results/Huggy/Huggy/Huggy-999908.onnx",
        "reward": 4.1217616733450155,
        "creation_time": 1686586270.549174,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999908.pt"
        ]
      },
      {
        "steps": 1199445,
        "file_path": "results/Huggy/Huggy/Huggy-1199445.onnx",
        "reward": 3.5004016865383494,
        "creation_time": 1686586544.9443614,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199445.pt"
        ]
      },
      {
        "steps": 1399940,
        "file_path": "results/Huggy/Huggy/Huggy-1399940.onnx",
        "reward": 3.8844677450951566,
        "creation_time": 1686586814.5105834,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399940.pt"
        ]
      },
      {
        "steps": 1599979,
        "file_path": "results/Huggy/Huggy/Huggy-1599979.onnx",
        "reward": 3.744722840189934,
        "creation_time": 1686587089.6703227,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599979.pt"
        ]
      },
      {
        "steps": 1799986,
        "file_path": "results/Huggy/Huggy/Huggy-1799986.onnx",
        "reward": 3.7238030282544416,
        "creation_time": 1686587363.650078,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799986.pt"
        ]
      },
      {
        "steps": 1999997,
        "file_path": "results/Huggy/Huggy/Huggy-1999997.onnx",
        "reward": 4.497293985806978,
        "creation_time": 1686587640.846116,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999997.pt"
        ]
      },
      {
        "steps": 2000097,
        "file_path": "results/Huggy/Huggy/Huggy-2000097.onnx",
        "reward": 4.518709659576416,
        "creation_time": 1686587640.974987,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000097.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000097,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.518709659576416,
      "creation_time": 1686587640.974987,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000097.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}