{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199919,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-199919.onnx",
                "reward": 3.6754822310279396,
                "creation_time": 1710335716.8899798,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-199919.pt"
                ]
            },
            {
                "steps": 399985,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-399985.onnx",
                "reward": 3.6647883009910585,
                "creation_time": 1710335957.0645928,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-399985.pt"
                ]
            },
            {
                "steps": 599956,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-599956.onnx",
                "reward": 2.9511585235595703,
                "creation_time": 1710336197.5853138,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-599956.pt"
                ]
            },
            {
                "steps": 799944,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-799944.onnx",
                "reward": 3.9521447148903337,
                "creation_time": 1710336442.726592,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-799944.pt"
                ]
            },
            {
                "steps": 999945,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-999945.onnx",
                "reward": 4.01322930857418,
                "creation_time": 1710336689.8025064,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-999945.pt"
                ]
            },
            {
                "steps": 1199904,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-1199904.onnx",
                "reward": 3.9759333355952116,
                "creation_time": 1710336936.9996374,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-1199904.pt"
                ]
            },
            {
                "steps": 1399969,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-1399969.onnx",
                "reward": 3.785912818303614,
                "creation_time": 1710337181.3920689,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-1399969.pt"
                ]
            },
            {
                "steps": 1599945,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-1599945.onnx",
                "reward": 3.947018454100359,
                "creation_time": 1710337430.3012147,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-1599945.pt"
                ]
            },
            {
                "steps": 1799977,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-1799977.onnx",
                "reward": 3.8863415352188713,
                "creation_time": 1710337678.6743562,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-1799977.pt"
                ]
            },
            {
                "steps": 1999971,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-1999971.onnx",
                "reward": 3.4180079732622417,
                "creation_time": 1710337925.280347,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-1999971.pt"
                ]
            },
            {
                "steps": 2000041,
                "file_path": "results/Huggy_sagravela/Huggy/Huggy-2000041.onnx",
                "reward": 3.4406260516908436,
                "creation_time": 1710337925.4639072,
                "auxillary_file_paths": [
                    "results/Huggy_sagravela/Huggy/Huggy-2000041.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000041,
            "file_path": "results/Huggy_sagravela/Huggy.onnx",
            "reward": 3.4406260516908436,
            "creation_time": 1710337925.4639072,
            "auxillary_file_paths": [
                "results/Huggy_sagravela/Huggy/Huggy-2000041.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.2.1+cu121"
    }
}
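
A minimal sketch of reading this checkpoint log, assuming the file is saved locally as run_logs/training_status.json (the path and variable names below are illustrative, not part of the ML-Agents API):

import json

# Load the training status written by ML-Agents during the PPO run.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each entry under "Huggy" -> "checkpoints" records the step count,
# the exported .onnx policy, and the mean reward at save time.
for ckpt in status["Huggy"]["checkpoints"]:
    print(ckpt["steps"], round(ckpt["reward"], 3), ckpt["file_path"])

# "final_checkpoint" points at the consolidated Huggy.onnx export.
final = status["Huggy"]["final_checkpoint"]
print("final:", final["steps"], round(final["reward"], 3), final["file_path"])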