ppo-Huggy / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199915,
"file_path": "results/Huggy2/Huggy/Huggy-199915.onnx",
"reward": 3.4860387203986183,
"creation_time": 1727844192.231332,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199915.pt"
]
},
{
"steps": 399950,
"file_path": "results/Huggy2/Huggy/Huggy-399950.onnx",
"reward": 3.5674880155876503,
"creation_time": 1727844434.7098818,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399950.pt"
]
},
{
"steps": 599974,
"file_path": "results/Huggy2/Huggy/Huggy-599974.onnx",
"reward": 4.153600981360988,
"creation_time": 1727844679.5546281,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599974.pt"
]
},
{
"steps": 799961,
"file_path": "results/Huggy2/Huggy/Huggy-799961.onnx",
"reward": 3.9558535958781387,
"creation_time": 1727844925.1648338,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799961.pt"
]
},
{
"steps": 999988,
"file_path": "results/Huggy2/Huggy/Huggy-999988.onnx",
"reward": 4.05124282086337,
"creation_time": 1727845177.2226052,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999988.pt"
]
},
{
"steps": 1199969,
"file_path": "results/Huggy2/Huggy/Huggy-1199969.onnx",
"reward": 4.221821241989368,
"creation_time": 1727845424.0135052,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199969.pt"
]
},
{
"steps": 1399972,
"file_path": "results/Huggy2/Huggy/Huggy-1399972.onnx",
"reward": 3.9377538363138833,
"creation_time": 1727845671.5386527,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399972.pt"
]
},
{
"steps": 1599883,
"file_path": "results/Huggy2/Huggy/Huggy-1599883.onnx",
"reward": 3.8424042917863286,
"creation_time": 1727845920.1958244,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599883.pt"
]
},
{
"steps": 1799971,
"file_path": "results/Huggy2/Huggy/Huggy-1799971.onnx",
"reward": 4.086743072070907,
"creation_time": 1727846174.1702254,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799971.pt"
]
},
{
"steps": 1999959,
"file_path": "results/Huggy2/Huggy/Huggy-1999959.onnx",
"reward": 3.5188636546549588,
"creation_time": 1727846428.8546238,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999959.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy2/Huggy/Huggy-2000030.onnx",
"reward": 3.5233401415195873,
"creation_time": 1727846428.9754732,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.5233401415195873,
"creation_time": 1727846428.9754732,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.4.1+cu121"
}
}
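For reference, a minimal Python sketch of how a file like this might be read back, assuming it has been saved locally as training_status.json (the filename and local path are assumptions based on the header above, not something the file itself specifies). It loads the checkpoint list under the "Huggy" behavior and picks the highest-reward entry; note that ML-Agents spells the key "auxillary_file_paths" exactly as it appears above.

import json

# Assumption: the JSON above is stored next to this script as
# "training_status.json".
with open("training_status.json") as f:
    status = json.load(f)

# All periodic checkpoints recorded for the "Huggy" behavior.
checkpoints = status["Huggy"]["checkpoints"]

# Pick the checkpoint with the highest mean reward.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best checkpoint: {best['file_path']} "
      f"(steps={best['steps']}, reward={best['reward']:.3f})")

# The exported final model lives under "final_checkpoint".
final = status["Huggy"]["final_checkpoint"]
print(f"final model: {final['file_path']} at {final['steps']} steps")

Running this against the data above would report Huggy-1199969.onnx (reward ≈ 4.222) as the best periodic checkpoint and results/Huggy2/Huggy.onnx as the final export.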