{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
},
"Huggy": {
"checkpoints": [
{
"steps": 0,
"file_path": "results/Huggy/Huggy/Huggy-0.onnx",
"reward": null,
"creation_time": 1690978418.4084682,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-0.pt"
]
},
{
"steps": 199798,
"file_path": "results/Huggy/Huggy/Huggy-199798.onnx",
"reward": 3.4398733424205408,
"creation_time": 1690978710.7360342,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199798.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 3.566782024868748,
"creation_time": 1690978992.5422745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599888,
"file_path": "results/Huggy/Huggy/Huggy-599888.onnx",
"reward": 3.0129279792308807,
"creation_time": 1690979283.4747436,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599888.pt"
]
},
{
"steps": 799474,
"file_path": "results/Huggy/Huggy/Huggy-799474.onnx",
"reward": 3.507156575471163,
"creation_time": 1690979562.0779057,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799474.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
"reward": 3.8419013134268827,
"creation_time": 1690979847.8573036,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199939,
"file_path": "results/Huggy/Huggy/Huggy-1199939.onnx",
"reward": 4.005099384223714,
"creation_time": 1690980110.9814348,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199939.pt"
]
},
{
"steps": 1399870,
"file_path": "results/Huggy/Huggy/Huggy-1399870.onnx",
"reward": 3.3632095863944604,
"creation_time": 1690980365.7795482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399870.pt"
]
},
{
"steps": 1599938,
"file_path": "results/Huggy/Huggy/Huggy-1599938.onnx",
"reward": 3.693296845279523,
"creation_time": 1690980625.391882,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599938.pt"
]
},
{
"steps": 1799937,
"file_path": "results/Huggy/Huggy/Huggy-1799937.onnx",
"reward": 4.2752286195755005,
"creation_time": 1690980884.1048677,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799937.pt"
]
},
{
"steps": 1999962,
"file_path": "results/Huggy/Huggy/Huggy-1999962.onnx",
"reward": 2.951528310775757,
"creation_time": 1690981135.3066685,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999962.pt"
]
},
{
"steps": 2000047,
"file_path": "results/Huggy/Huggy/Huggy-2000047.onnx",
"reward": 3.227987861633301,
"creation_time": 1690981135.5025735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000047.pt"
]
}
],
"final_checkpoint": {
"steps": 2000047,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.227987861633301,
"creation_time": 1690981135.5025735,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000047.pt"
]
}
}
}