Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199930,
                "file_path": "results/Huggy2/Huggy/Huggy-199930.onnx",
                "reward": 3.3405155817667644,
                "creation_time": 1709281218.730352,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199930.pt"
                ]
            },
            {
                "steps": 399564,
                "file_path": "results/Huggy2/Huggy/Huggy-399564.onnx",
                "reward": 3.772237629923102,
                "creation_time": 1709281441.4549701,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399564.pt"
                ]
            },
            {
                "steps": 599290,
                "file_path": "results/Huggy2/Huggy/Huggy-599290.onnx",
                "reward": 4.409993854436007,
                "creation_time": 1709281665.8979497,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599290.pt"
                ]
            },
            {
                "steps": 799951,
                "file_path": "results/Huggy2/Huggy/Huggy-799951.onnx",
                "reward": 3.9415942894087896,
                "creation_time": 1709281890.294604,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799951.pt"
                ]
            },
            {
                "steps": 999995,
                "file_path": "results/Huggy2/Huggy/Huggy-999995.onnx",
                "reward": 3.9164469506380692,
                "creation_time": 1709282116.5750282,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999995.pt"
                ]
            },
            {
                "steps": 1199939,
                "file_path": "results/Huggy2/Huggy/Huggy-1199939.onnx",
                "reward": 3.7916917194399917,
                "creation_time": 1709282344.8885806,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199939.pt"
                ]
            },
            {
                "steps": 1399993,
                "file_path": "results/Huggy2/Huggy/Huggy-1399993.onnx",
                "reward": null,
                "creation_time": 1709282574.6422436,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399993.pt"
                ]
            },
            {
                "steps": 1599937,
                "file_path": "results/Huggy2/Huggy/Huggy-1599937.onnx",
                "reward": 3.5743831415206957,
                "creation_time": 1709282801.7482378,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599937.pt"
                ]
            },
            {
                "steps": 1799977,
                "file_path": "results/Huggy2/Huggy/Huggy-1799977.onnx",
                "reward": 3.9636526759465536,
                "creation_time": 1709283032.2983534,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799977.pt"
                ]
            },
            {
                "steps": 1999433,
                "file_path": "results/Huggy2/Huggy/Huggy-1999433.onnx",
                "reward": 5.353515011923654,
                "creation_time": 1709283259.2451909,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999433.pt"
                ]
            },
            {
                "steps": 2000183,
                "file_path": "results/Huggy2/Huggy/Huggy-2000183.onnx",
                "reward": 4.277905374765396,
                "creation_time": 1709283259.384991,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000183.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000183,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 4.277905374765396,
            "creation_time": 1709283259.384991,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000183.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.2.1+cu121"
    }
}
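
For reference, a minimal sketch of how this file could be read back in Python. The key names ("Huggy", "checkpoints", "reward", "steps", "file_path", "final_checkpoint") come straight from the JSON above; the script itself is illustrative and not part of the ML-Agents run, and it assumes the file is available locally as training_status.json.

# Minimal sketch (illustrative, not part of the run logs): inspect the
# checkpoint records written by ML-Agents during the Huggy training run.
import json

with open("training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Print reward per checkpoint; "reward" can be null (None in Python) when
# no completed-episode reward was available at that checkpoint, so guard it.
for ckpt in behavior["checkpoints"]:
    reward = ckpt["reward"]
    reward_str = f"{reward:.3f}" if reward is not None else "n/a"
    print(f'{ckpt["steps"]:>8} steps  reward={reward_str}  {ckpt["file_path"]}')

# The final exported policy (the Huggy.onnx under results/Huggy2).
final = behavior["final_checkpoint"]
print("final:", final["file_path"], "reward", final["reward"])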