ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199918,
                "file_path": "results/Huggy/Huggy/Huggy-199918.onnx",
                "reward": 3.8245207306110496,
                "creation_time": 1692588652.8782597,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199918.pt"
                ]
            },
            {
                "steps": 399963,
                "file_path": "results/Huggy/Huggy/Huggy-399963.onnx",
                "reward": 3.9317543678364513,
                "creation_time": 1692588929.9986727,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399963.pt"
                ]
            },
            {
                "steps": 599956,
                "file_path": "results/Huggy/Huggy/Huggy-599956.onnx",
                "reward": 3.7375975449879966,
                "creation_time": 1692589216.6732059,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599956.pt"
                ]
            },
            {
                "steps": 799793,
                "file_path": "results/Huggy/Huggy/Huggy-799793.onnx",
                "reward": 4.10736581651788,
                "creation_time": 1692589497.262962,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799793.pt"
                ]
            },
            {
                "steps": 999979,
                "file_path": "results/Huggy/Huggy/Huggy-999979.onnx",
                "reward": 3.7109181866501317,
                "creation_time": 1692589785.090783,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999979.pt"
                ]
            },
            {
                "steps": 1199839,
                "file_path": "results/Huggy/Huggy/Huggy-1199839.onnx",
                "reward": 3.722981758739637,
                "creation_time": 1692590071.2290647,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199839.pt"
                ]
            },
            {
                "steps": 1399810,
                "file_path": "results/Huggy/Huggy/Huggy-1399810.onnx",
                "reward": 4.2731542706489565,
                "creation_time": 1692590365.1833413,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399810.pt"
                ]
            },
            {
                "steps": 1599990,
                "file_path": "results/Huggy/Huggy/Huggy-1599990.onnx",
                "reward": 3.8045808827238425,
                "creation_time": 1692590651.8216484,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599990.pt"
                ]
            },
            {
                "steps": 1799968,
                "file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
                "reward": 3.9870643579870237,
                "creation_time": 1692590939.116139,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799968.pt"
                ]
            },
            {
                "steps": 1999972,
                "file_path": "results/Huggy/Huggy/Huggy-1999972.onnx",
                "reward": 4.167931413650512,
                "creation_time": 1692591228.8640795,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999972.pt"
                ]
            },
            {
                "steps": 2000007,
                "file_path": "results/Huggy/Huggy/Huggy-2000007.onnx",
                "reward": 4.13758712083521,
                "creation_time": 1692591228.9964104,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000007.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000007,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.13758712083521,
            "creation_time": 1692591228.9964104,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000007.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
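
This file is the checkpoint ledger ML-Agents writes during training: for each behavior name ("Huggy" here) it records the step count, mean reward, creation timestamp, and file paths of every checkpoint, plus the final exported policy. A minimal sketch of reading it with Python's standard library; the relative path is an assumption, and note that ML-Agents spells the key "auxillary_file_paths" exactly as shown above.

```python
import json

# Path is an assumption; point it at wherever run_logs/training_status.json
# lives locally.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Every top-level key except "metadata" is a behavior name with its
# checkpoint history.
for name, record in status.items():
    if name == "metadata":
        continue
    for ckpt in record["checkpoints"]:
        print(f'{ckpt["steps"]:>9} steps  '
              f'reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')
    final = record["final_checkpoint"]
    print(f'final: {final["steps"]} steps, reward {final["reward"]:.3f}')
```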
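The "final_checkpoint" entry points at the exported ONNX policy (results/Huggy/Huggy.onnx). A sketch of inspecting it with onnxruntime, assuming that package is installed and the run's results/ tree is present locally:

```python
import onnxruntime as ort

# Path taken from "final_checkpoint" above; availability of the file is
# an assumption.
sess = ort.InferenceSession("results/Huggy/Huggy.onnx",
                            providers=["CPUExecutionProvider"])

# List the graph's inputs and outputs, i.e. what the Unity runtime
# feeds the policy and what it reads back.
for inp in sess.get_inputs():
    print("input: ", inp.name, inp.shape, inp.type)
for out in sess.get_outputs():
    print("output:", out.name, out.shape, out.type)
```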