{
"Huggy": {
"checkpoints": [
{
"steps": 199844,
"file_path": "results/Huggy2/Huggy/Huggy-199844.onnx",
"reward": 3.4751482475887645,
"creation_time": 1722437432.4514267,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199844.pt"
]
},
{
"steps": 399911,
"file_path": "results/Huggy2/Huggy/Huggy-399911.onnx",
"reward": 3.589610852978446,
"creation_time": 1722437693.8319166,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399911.pt"
]
},
{
"steps": 599973,
"file_path": "results/Huggy2/Huggy/Huggy-599973.onnx",
"reward": 3.9508340706427894,
"creation_time": 1722437951.0096207,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599973.pt"
]
},
{
"steps": 799843,
"file_path": "results/Huggy2/Huggy/Huggy-799843.onnx",
"reward": 3.8783181970601253,
"creation_time": 1722438206.1726074,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799843.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy2/Huggy/Huggy-999977.onnx",
"reward": 3.9542041048407555,
"creation_time": 1722438461.4291937,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1199941,
"file_path": "results/Huggy2/Huggy/Huggy-1199941.onnx",
"reward": 3.7981058761130932,
"creation_time": 1722438719.6124988,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199941.pt"
]
},
{
"steps": 1399961,
"file_path": "results/Huggy2/Huggy/Huggy-1399961.onnx",
"reward": 3.753734791278839,
"creation_time": 1722438981.170548,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399961.pt"
]
},
{
"steps": 1599977,
"file_path": "results/Huggy2/Huggy/Huggy-1599977.onnx",
"reward": 3.898475118795363,
"creation_time": 1722439238.129373,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599977.pt"
]
},
{
"steps": 1799931,
"file_path": "results/Huggy2/Huggy/Huggy-1799931.onnx",
"reward": 4.124910477254031,
"creation_time": 1722439506.2296054,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799931.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy2/Huggy/Huggy-1999987.onnx",
"reward": 4.438950262963772,
"creation_time": 1722439771.7624729,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000066,
"file_path": "results/Huggy2/Huggy/Huggy-2000066.onnx",
"reward": 4.419206421191876,
"creation_time": 1722439771.889263,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000066.pt"
]
}
],
"final_checkpoint": {
"steps": 2000066,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.419206421191876,
"creation_time": 1722439771.889263,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000066.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.1+cu121"
}
}