{
"Huggy": {
"checkpoints": [
{
"steps": 199786,
"file_path": "results/Huggy2/Huggy/Huggy-199786.onnx",
"reward": 2.9866676086729225,
"creation_time": 1731241641.4559433,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199786.pt"
]
},
{
"steps": 399938,
"file_path": "results/Huggy2/Huggy/Huggy-399938.onnx",
"reward": 3.5831665992736816,
"creation_time": 1731241902.2205982,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399938.pt"
]
},
{
"steps": 599984,
"file_path": "results/Huggy2/Huggy/Huggy-599984.onnx",
"reward": 0.3588714400927226,
"creation_time": 1731242168.606778,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599984.pt"
]
},
{
"steps": 799303,
"file_path": "results/Huggy2/Huggy/Huggy-799303.onnx",
"reward": 3.8467973314675707,
"creation_time": 1731242428.1570396,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799303.pt"
]
},
{
"steps": 999899,
"file_path": "results/Huggy2/Huggy/Huggy-999899.onnx",
"reward": 3.680588083326324,
"creation_time": 1731242690.6923854,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999899.pt"
]
},
{
"steps": 1199887,
"file_path": "results/Huggy2/Huggy/Huggy-1199887.onnx",
"reward": 3.798086150604136,
"creation_time": 1731242952.7461352,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199887.pt"
]
},
{
"steps": 1399968,
"file_path": "results/Huggy2/Huggy/Huggy-1399968.onnx",
"reward": 3.734219048321247,
"creation_time": 1731243206.5929508,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399968.pt"
]
},
{
"steps": 1599965,
"file_path": "results/Huggy2/Huggy/Huggy-1599965.onnx",
"reward": 3.511519280346957,
"creation_time": 1731243465.4739363,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599965.pt"
]
},
{
"steps": 1799428,
"file_path": "results/Huggy2/Huggy/Huggy-1799428.onnx",
"reward": 3.753928962875815,
"creation_time": 1731243727.7443428,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799428.pt"
]
},
{
"steps": 1999832,
"file_path": "results/Huggy2/Huggy/Huggy-1999832.onnx",
"reward": 3.5209813565015793,
"creation_time": 1731243990.0973282,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999832.pt"
]
},
{
"steps": 2000582,
"file_path": "results/Huggy2/Huggy/Huggy-2000582.onnx",
"reward": 2.7070990006128945,
"creation_time": 1731243990.24241,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000582.pt"
]
}
],
"final_checkpoint": {
"steps": 2000582,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.7070990006128945,
"creation_time": 1731243990.24241,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000582.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.0+cu121"
}
}