ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199714,
                "file_path": "results/Huggy/Huggy/Huggy-199714.onnx",
                "reward": 3.4116748152300715,
                "creation_time": 1694701167.800443,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199714.pt"
                ]
            },
            {
                "steps": 399895,
                "file_path": "results/Huggy/Huggy/Huggy-399895.onnx",
                "reward": 3.5149236420790353,
                "creation_time": 1694701406.4343202,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399895.pt"
                ]
            },
            {
                "steps": 599889,
                "file_path": "results/Huggy/Huggy/Huggy-599889.onnx",
                "reward": 4.057368457317352,
                "creation_time": 1694701661.7873213,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599889.pt"
                ]
            },
            {
                "steps": 799975,
                "file_path": "results/Huggy/Huggy/Huggy-799975.onnx",
                "reward": 3.7653327630337885,
                "creation_time": 1694701913.9384618,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799975.pt"
                ]
            },
            {
                "steps": 999943,
                "file_path": "results/Huggy/Huggy/Huggy-999943.onnx",
                "reward": 4.073585124526705,
                "creation_time": 1694702164.1158369,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999943.pt"
                ]
            },
            {
                "steps": 1199968,
                "file_path": "results/Huggy/Huggy/Huggy-1199968.onnx",
                "reward": 4.191359781857693,
                "creation_time": 1694702420.7849152,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199968.pt"
                ]
            },
            {
                "steps": 1399984,
                "file_path": "results/Huggy/Huggy/Huggy-1399984.onnx",
                "reward": 4.078478860855102,
                "creation_time": 1694702676.7030983,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399984.pt"
                ]
            },
            {
                "steps": 1599580,
                "file_path": "results/Huggy/Huggy/Huggy-1599580.onnx",
                "reward": 3.6906431701761733,
                "creation_time": 1694702923.0511806,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599580.pt"
                ]
            },
            {
                "steps": 1799954,
                "file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
                "reward": 4.0325907559621905,
                "creation_time": 1694703176.7255926,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799954.pt"
                ]
            },
            {
                "steps": 1999947,
                "file_path": "results/Huggy/Huggy/Huggy-1999947.onnx",
                "reward": 3.899360568182809,
                "creation_time": 1694703429.9252238,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999947.pt"
                ]
            },
            {
                "steps": 2000043,
                "file_path": "results/Huggy/Huggy/Huggy-2000043.onnx",
                "reward": 3.887424541844262,
                "creation_time": 1694703430.0747921,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000043.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000043,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.887424541844262,
            "creation_time": 1694703430.0747921,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000043.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
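
For reference, a minimal sketch of how this log might be inspected programmatically. It assumes the file sits at run_logs/training_status.json relative to the working directory; the path and script are illustrative, not part of the training run itself.

import json

# Load the ML-Agents training status log (path is an assumption for illustration).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# "Huggy" is the behavior name; its entry holds the checkpoint history.
behavior = status["Huggy"]

# Print each checkpoint's step count, mean reward, and ONNX export path.
for ckpt in behavior["checkpoints"]:
    print(f"steps={ckpt['steps']:>8}  reward={ckpt['reward']:.3f}  -> {ckpt['file_path']}")

# The final checkpoint is the model exported at the end of training.
final = behavior["final_checkpoint"]
print(f"final: steps={final['steps']}, reward={final['reward']:.3f}, file={final['file_path']}")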