ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199857,
"file_path": "results/Huggy2/Huggy/Huggy-199857.onnx",
"reward": 3.4076933787419246,
"creation_time": 1716400092.9449077,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199857.pt"
]
},
{
"steps": 399990,
"file_path": "results/Huggy2/Huggy/Huggy-399990.onnx",
"reward": 3.866133919784001,
"creation_time": 1716400536.678272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399990.pt"
]
},
{
"steps": 599949,
"file_path": "results/Huggy2/Huggy/Huggy-599949.onnx",
"reward": 4.548809501859877,
"creation_time": 1716400988.678131,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599949.pt"
]
},
{
"steps": 799970,
"file_path": "results/Huggy2/Huggy/Huggy-799970.onnx",
"reward": 3.825235230978145,
"creation_time": 1716401425.4300518,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799970.pt"
]
},
{
"steps": 999991,
"file_path": "results/Huggy2/Huggy/Huggy-999991.onnx",
"reward": 4.030696034893509,
"creation_time": 1716401873.4166853,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999991.pt"
]
},
{
"steps": 1199807,
"file_path": "results/Huggy2/Huggy/Huggy-1199807.onnx",
"reward": 3.9844737749501884,
"creation_time": 1716402330.854186,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199807.pt"
]
},
{
"steps": 1399891,
"file_path": "results/Huggy2/Huggy/Huggy-1399891.onnx",
"reward": 4.074730937297527,
"creation_time": 1716402787.588629,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399891.pt"
]
},
{
"steps": 1599911,
"file_path": "results/Huggy2/Huggy/Huggy-1599911.onnx",
"reward": 3.833836986020554,
"creation_time": 1716403232.1784034,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599911.pt"
]
},
{
"steps": 1799352,
"file_path": "results/Huggy2/Huggy/Huggy-1799352.onnx",
"reward": 3.6956543379682834,
"creation_time": 1716403720.8154054,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799352.pt"
]
},
{
"steps": 1999391,
"file_path": "results/Huggy2/Huggy/Huggy-1999391.onnx",
"reward": 4.066869376821721,
"creation_time": 1716404192.8504376,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999391.pt"
]
},
{
"steps": 2000141,
"file_path": "results/Huggy2/Huggy/Huggy-2000141.onnx",
"reward": 3.8991721235215664,
"creation_time": 1716404193.024106,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000141.pt"
]
}
],
"final_checkpoint": {
"steps": 2000141,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8991721235215664,
"creation_time": 1716404193.024106,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000141.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}
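A minimal sketch (not part of the original file) of how this training_status.json could be inspected with standard-library Python; the local path below is an assumption and should be adjusted to wherever the file is saved.

# Sketch: load the ML-Agents training status and summarize the Huggy checkpoints.
import json

# Assumed local path to the file shown above.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]
for ckpt in checkpoints:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# Checkpoint with the highest recorded mean reward.
best = max(checkpoints, key=lambda c: c["reward"])
print("best checkpoint:", best["file_path"], "reward:", round(best["reward"], 3))

# Final exported checkpoint.
final = status["Huggy"]["final_checkpoint"]
print("final checkpoint:", final["file_path"], "at", final["steps"], "steps")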