ppo-Huggy-v1 / run_logs/training_status.json
biwako's picture
Huggy
d6057d7
{
"Huggy": {
"checkpoints": [
{
"steps": 199978,
"file_path": "results/Huggy/Huggy/Huggy-199978.onnx",
"reward": 3.500188144945329,
"creation_time": 1682316298.0066607,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199978.pt"
]
},
{
"steps": 399900,
"file_path": "results/Huggy/Huggy/Huggy-399900.onnx",
"reward": 3.8941244202501633,
"creation_time": 1682316526.2394507,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399900.pt"
]
},
{
"steps": 599850,
"file_path": "results/Huggy/Huggy/Huggy-599850.onnx",
"reward": 3.6040804243087767,
"creation_time": 1682316758.0788083,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599850.pt"
]
},
{
"steps": 799970,
"file_path": "results/Huggy/Huggy/Huggy-799970.onnx",
"reward": 3.742728171274834,
"creation_time": 1682316985.1019967,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799970.pt"
]
},
{
"steps": 999903,
"file_path": "results/Huggy/Huggy/Huggy-999903.onnx",
"reward": 4.115381216245984,
"creation_time": 1682317216.5629132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999903.pt"
]
},
{
"steps": 1199989,
"file_path": "results/Huggy/Huggy/Huggy-1199989.onnx",
"reward": 3.7060928023778477,
"creation_time": 1682317449.5534956,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199989.pt"
]
},
{
"steps": 1399812,
"file_path": "results/Huggy/Huggy/Huggy-1399812.onnx",
"reward": 3.4162176113862257,
"creation_time": 1682317682.4376137,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399812.pt"
]
},
{
"steps": 1599994,
"file_path": "results/Huggy/Huggy/Huggy-1599994.onnx",
"reward": 3.9635402838820997,
"creation_time": 1682317912.0868158,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599994.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 3.798056531543574,
"creation_time": 1682318146.6227944,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999639,
"file_path": "results/Huggy/Huggy/Huggy-1999639.onnx",
"reward": 4.105912736483982,
"creation_time": 1682318379.1415002,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999639.pt"
]
},
{
"steps": 2000389,
"file_path": "results/Huggy/Huggy/Huggy-2000389.onnx",
"reward": 3.942090849543727,
"creation_time": 1682318379.3019457,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000389.pt"
]
}
],
"final_checkpoint": {
"steps": 2000389,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.942090849543727,
"creation_time": 1682318379.3019457,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000389.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}