{
"Huggy": {
"checkpoints": [
{
"steps": 199970,
"file_path": "results/Huggy/Huggy/Huggy-199970.onnx",
"reward": 3.1965434013820087,
"creation_time": 1694355138.7336493,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199970.pt"
]
},
{
"steps": 399848,
"file_path": "results/Huggy/Huggy/Huggy-399848.onnx",
"reward": 3.9273217846365536,
"creation_time": 1694355377.00932,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399848.pt"
]
},
{
"steps": 599981,
"file_path": "results/Huggy/Huggy/Huggy-599981.onnx",
"reward": 3.521595829411557,
"creation_time": 1694355618.029254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599981.pt"
]
},
{
"steps": 799948,
"file_path": "results/Huggy/Huggy/Huggy-799948.onnx",
"reward": 3.919055172783172,
"creation_time": 1694355853.8054352,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799948.pt"
]
},
{
"steps": 999976,
"file_path": "results/Huggy/Huggy/Huggy-999976.onnx",
"reward": 3.7764031222233405,
"creation_time": 1694356097.4930692,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999976.pt"
]
},
{
"steps": 1199330,
"file_path": "results/Huggy/Huggy/Huggy-1199330.onnx",
"reward": 3.7342122440988366,
"creation_time": 1694356346.8552804,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199330.pt"
]
},
{
"steps": 1399941,
"file_path": "results/Huggy/Huggy/Huggy-1399941.onnx",
"reward": null,
"creation_time": 1694356593.0284238,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399941.pt"
]
},
{
"steps": 1599936,
"file_path": "results/Huggy/Huggy/Huggy-1599936.onnx",
"reward": 4.057188367164588,
"creation_time": 1694356839.1348808,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599936.pt"
]
},
{
"steps": 1799893,
"file_path": "results/Huggy/Huggy/Huggy-1799893.onnx",
"reward": 3.769098309838042,
"creation_time": 1694357090.0042963,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799893.pt"
]
},
{
"steps": 1999962,
"file_path": "results/Huggy/Huggy/Huggy-1999962.onnx",
"reward": 4.074796613524942,
"creation_time": 1694357334.9502585,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999962.pt"
]
},
{
"steps": 2000031,
"file_path": "results/Huggy/Huggy/Huggy-2000031.onnx",
"reward": 4.084180321012225,
"creation_time": 1694357335.07567,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000031.pt"
]
}
],
"final_checkpoint": {
"steps": 2000031,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.084180321012225,
"creation_time": 1694357335.07567,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000031.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}