{
"Huggy": {
"checkpoints": [
{
"steps": 199761,
"file_path": "results/Huggy/Huggy/Huggy-199761.onnx",
"reward": 3.072959595001661,
"creation_time": 1692624641.562676,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199761.pt"
]
},
{
"steps": 399886,
"file_path": "results/Huggy/Huggy/Huggy-399886.onnx",
"reward": 3.7864317297935486,
"creation_time": 1692624880.1494927,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399886.pt"
]
},
{
"steps": 599852,
"file_path": "results/Huggy/Huggy/Huggy-599852.onnx",
"reward": 2.924211621284485,
"creation_time": 1692625123.8798213,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599852.pt"
]
},
{
"steps": 799390,
"file_path": "results/Huggy/Huggy/Huggy-799390.onnx",
"reward": 3.675836137988988,
"creation_time": 1692625363.3598711,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799390.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy/Huggy/Huggy-999997.onnx",
"reward": 3.8394303483921184,
"creation_time": 1692625609.1876748,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199963,
"file_path": "results/Huggy/Huggy/Huggy-1199963.onnx",
"reward": 3.8241373625668613,
"creation_time": 1692625850.4507816,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199963.pt"
]
},
{
"steps": 1399949,
"file_path": "results/Huggy/Huggy/Huggy-1399949.onnx",
"reward": 3.9160684549869016,
"creation_time": 1692626088.9500103,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399949.pt"
]
},
{
"steps": 1599993,
"file_path": "results/Huggy/Huggy/Huggy-1599993.onnx",
"reward": 3.777869704130449,
"creation_time": 1692626339.663611,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599993.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 3.592729022222407,
"creation_time": 1692626583.9841206,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999955,
"file_path": "results/Huggy/Huggy/Huggy-1999955.onnx",
"reward": 3.467632632702589,
"creation_time": 1692626835.5732224,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999955.pt"
]
},
{
"steps": 2000039,
"file_path": "results/Huggy/Huggy/Huggy-2000039.onnx",
"reward": 3.47966489721747,
"creation_time": 1692626835.6989148,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000039.pt"
]
}
],
"final_checkpoint": {
"steps": 2000039,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.47966489721747,
"creation_time": 1692626835.6989148,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000039.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}