{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199927,
        "file_path": "results/Huggy/Huggy/Huggy-199927.onnx",
        "reward": 3.435059010036408,
        "creation_time": 1705133309.064287,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199927.pt"
        ]
      },
      {
        "steps": 399948,
        "file_path": "results/Huggy/Huggy/Huggy-399948.onnx",
        "reward": 3.6714822660232413,
        "creation_time": 1705133824.6301167,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399948.pt"
        ]
      },
      {
        "steps": 599891,
        "file_path": "results/Huggy/Huggy/Huggy-599891.onnx",
        "reward": 4.712309002876282,
        "creation_time": 1705134349.3993812,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599891.pt"
        ]
      },
      {
        "steps": 799956,
        "file_path": "results/Huggy/Huggy/Huggy-799956.onnx",
        "reward": 3.866971597952001,
        "creation_time": 1705134859.2544339,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799956.pt"
        ]
      },
      {
        "steps": 999989,
        "file_path": "results/Huggy/Huggy/Huggy-999989.onnx",
        "reward": 4.001953373743794,
        "creation_time": 1705135393.010274,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999989.pt"
        ]
      },
      {
        "steps": 1199942,
        "file_path": "results/Huggy/Huggy/Huggy-1199942.onnx",
        "reward": 3.5858012437820435,
        "creation_time": 1705135927.764126,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199942.pt"
        ]
      },
      {
        "steps": 1399941,
        "file_path": "results/Huggy/Huggy/Huggy-1399941.onnx",
        "reward": 3.8079528652326893,
        "creation_time": 1705136445.7967358,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399941.pt"
        ]
      },
      {
        "steps": 1599994,
        "file_path": "results/Huggy/Huggy/Huggy-1599994.onnx",
        "reward": 3.953507703478618,
        "creation_time": 1705137011.9382315,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599994.pt"
        ]
      },
      {
        "steps": 1799949,
        "file_path": "results/Huggy/Huggy/Huggy-1799949.onnx",
        "reward": 3.9962807717530624,
        "creation_time": 1705137546.4948204,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799949.pt"
        ]
      },
      {
        "steps": 1999940,
        "file_path": "results/Huggy/Huggy/Huggy-1999940.onnx",
        "reward": 3.993728185599705,
        "creation_time": 1705138078.8373687,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999940.pt"
        ]
      },
      {
        "steps": 2000012,
        "file_path": "results/Huggy/Huggy/Huggy-2000012.onnx",
        "reward": 3.971042405675959,
        "creation_time": 1705138078.988027,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000012.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000012,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.971042405675959,
      "creation_time": 1705138078.988027,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000012.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.1.0.dev0",
    "torch_version": "2.1.2+cu121"
  }
}