ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199998,
                "file_path": "results/Huggy/Huggy/Huggy-199998.onnx",
                "reward": 3.3202436263599093,
                "creation_time": 1701173049.5884235,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199998.pt"
                ]
            },
            {
                "steps": 399852,
                "file_path": "results/Huggy/Huggy/Huggy-399852.onnx",
                "reward": 3.727994713428858,
                "creation_time": 1701173286.3932629,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399852.pt"
                ]
            },
            {
                "steps": 599919,
                "file_path": "results/Huggy/Huggy/Huggy-599919.onnx",
                "reward": 3.350984791914622,
                "creation_time": 1701173526.8781104,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599919.pt"
                ]
            },
            {
                "steps": 799999,
                "file_path": "results/Huggy/Huggy/Huggy-799999.onnx",
                "reward": 3.525209914554249,
                "creation_time": 1701173764.6016726,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799999.pt"
                ]
            },
            {
                "steps": 999927,
                "file_path": "results/Huggy/Huggy/Huggy-999927.onnx",
                "reward": 3.7663767836310647,
                "creation_time": 1701174005.7590497,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999927.pt"
                ]
            },
            {
                "steps": 1199630,
                "file_path": "results/Huggy/Huggy/Huggy-1199630.onnx",
                "reward": 4.304585321744283,
                "creation_time": 1701174248.866761,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199630.pt"
                ]
            },
            {
                "steps": 1399954,
                "file_path": "results/Huggy/Huggy/Huggy-1399954.onnx",
                "reward": 4.07647689183553,
                "creation_time": 1701174483.8577297,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399954.pt"
                ]
            },
            {
                "steps": 1599888,
                "file_path": "results/Huggy/Huggy/Huggy-1599888.onnx",
                "reward": 3.9229658553205384,
                "creation_time": 1701174715.814181,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599888.pt"
                ]
            },
            {
                "steps": 1799642,
                "file_path": "results/Huggy/Huggy/Huggy-1799642.onnx",
                "reward": 3.755851839648353,
                "creation_time": 1701174949.856702,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799642.pt"
                ]
            },
            {
                "steps": 1999895,
                "file_path": "results/Huggy/Huggy/Huggy-1999895.onnx",
                "reward": 4.333392669757207,
                "creation_time": 1701175185.3223095,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999895.pt"
                ]
            },
            {
                "steps": 2000003,
                "file_path": "results/Huggy/Huggy/Huggy-2000003.onnx",
                "reward": 4.29419264793396,
                "creation_time": 1701175185.4380727,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000003.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000003,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.29419264793396,
            "creation_time": 1701175185.4380727,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000003.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.1+cu121"
    }
}
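
The JSON above is the checkpoint log that ML-Agents writes during training: one entry per exported checkpoint, with the step count, the exported .onnx path, the mean reward at export time, and the accompanying .pt file. A minimal sketch of how it could be inspected, assuming the file lives at the path shown in the header and using only Python's standard json module (the summary logic below is not part of the repository):

import json

# Load the training status log written by ML-Agents.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint entry records the step count, exported .onnx path,
# mean reward at export time, and the auxiliary .pt file.
checkpoints = status["Huggy"]["checkpoints"]
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>9}  reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

# Report the checkpoint with the highest recorded reward.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best: {best['file_path']} (reward {best['reward']:.3f} at {best['steps']} steps)")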