{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199893,
        "file_path": "results/Huggy/Huggy/Huggy-199893.onnx",
        "reward": 3.080959898730119,
        "creation_time": 1675213366.8233726,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199893.pt"
        ]
      },
      {
        "steps": 399899,
        "file_path": "results/Huggy/Huggy/Huggy-399899.onnx",
        "reward": 3.9042805042423185,
        "creation_time": 1675213593.7208045,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399899.pt"
        ]
      },
      {
        "steps": 599997,
        "file_path": "results/Huggy/Huggy/Huggy-599997.onnx",
        "reward": 4.123985022306442,
        "creation_time": 1675213823.8724837,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599997.pt"
        ]
      },
      {
        "steps": 799978,
        "file_path": "results/Huggy/Huggy/Huggy-799978.onnx",
        "reward": 3.7586537523537267,
        "creation_time": 1675214052.2103782,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799978.pt"
        ]
      },
      {
        "steps": 999575,
        "file_path": "results/Huggy/Huggy/Huggy-999575.onnx",
        "reward": 3.713754242658615,
        "creation_time": 1675214282.8835425,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999575.pt"
        ]
      },
      {
        "steps": 1199746,
        "file_path": "results/Huggy/Huggy/Huggy-1199746.onnx",
        "reward": 3.9657156713425166,
        "creation_time": 1675214514.7206526,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199746.pt"
        ]
      },
      {
        "steps": 1399994,
        "file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
        "reward": 5.147425174713135,
        "creation_time": 1675214745.8446093,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399994.pt"
        ]
      },
      {
        "steps": 1599310,
        "file_path": "results/Huggy/Huggy/Huggy-1599310.onnx",
        "reward": 3.8038785684676397,
        "creation_time": 1675214971.683912,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599310.pt"
        ]
      },
      {
        "steps": 1799994,
        "file_path": "results/Huggy/Huggy/Huggy-1799994.onnx",
        "reward": 3.6319840270229893,
        "creation_time": 1675215201.592975,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799994.pt"
        ]
      },
      {
        "steps": 1999938,
        "file_path": "results/Huggy/Huggy/Huggy-1999938.onnx",
        "reward": 3.938336025836856,
        "creation_time": 1675215434.3925846,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999938.pt"
        ]
      },
      {
        "steps": 2000027,
        "file_path": "results/Huggy/Huggy/Huggy-2000027.onnx",
        "reward": 4.013703170147809,
        "creation_time": 1675215434.5026405,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000027.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000027,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.013703170147809,
      "creation_time": 1675215434.5026405,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000027.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}