{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199921,
        "file_path": "results/Huggy/Huggy/Huggy-199921.onnx",
        "reward": 3.416108190173834,
        "creation_time": 1689588128.8415236,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199921.pt"
        ]
      },
      {
        "steps": 399995,
        "file_path": "results/Huggy/Huggy/Huggy-399995.onnx",
        "reward": 3.7819149138322516,
        "creation_time": 1689588396.170889,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399995.pt"
        ]
      },
      {
        "steps": 599865,
        "file_path": "results/Huggy/Huggy/Huggy-599865.onnx",
        "reward": 3.3833986167554504,
        "creation_time": 1689588663.8463824,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599865.pt"
        ]
      },
      {
        "steps": 799921,
        "file_path": "results/Huggy/Huggy/Huggy-799921.onnx",
        "reward": 3.7909297913801474,
        "creation_time": 1689588929.2272513,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799921.pt"
        ]
      },
      {
        "steps": 999955,
        "file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
        "reward": 3.6531854516170066,
        "creation_time": 1689589198.610027,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999955.pt"
        ]
      },
      {
        "steps": 1199394,
        "file_path": "results/Huggy/Huggy/Huggy-1199394.onnx",
        "reward": 3.582927738626798,
        "creation_time": 1689589469.871089,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199394.pt"
        ]
      },
      {
        "steps": 1399938,
        "file_path": "results/Huggy/Huggy/Huggy-1399938.onnx",
        "reward": 3.8463813499228596,
        "creation_time": 1689589739.8657727,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399938.pt"
        ]
      },
      {
        "steps": 1599944,
        "file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
        "reward": 3.7883728375801673,
        "creation_time": 1689590020.0887911,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599944.pt"
        ]
      },
      {
        "steps": 1799951,
        "file_path": "results/Huggy/Huggy/Huggy-1799951.onnx",
        "reward": 3.4581402225757207,
        "creation_time": 1689590299.9107554,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799951.pt"
        ]
      },
      {
        "steps": 1999888,
        "file_path": "results/Huggy/Huggy/Huggy-1999888.onnx",
        "reward": 4.353064772559375,
        "creation_time": 1689590580.8687842,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999888.pt"
        ]
      },
      {
        "steps": 2000013,
        "file_path": "results/Huggy/Huggy/Huggy-2000013.onnx",
        "reward": 4.406240738573528,
        "creation_time": 1689590581.0098655,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000013.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000013,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.406240738573528,
      "creation_time": 1689590581.0098655,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000013.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}