ppo-SnowballTarget1/run_logs/training_status.json
{
    "SnowballTarget": {
        "checkpoints": [
            {
                "steps": 599936,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-599936.onnx",
                "reward": 25.818181818181817,
                "creation_time": 1673781154.4946294,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-599936.pt"
                ]
            },
            {
                "steps": 649960,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-649960.onnx",
                "reward": 28.0,
                "creation_time": 1673781282.7175827,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-649960.pt"
                ]
            },
            {
                "steps": 699984,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-699984.onnx",
                "reward": 27.0,
                "creation_time": 1673781410.666404,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-699984.pt"
                ]
            },
            {
                "steps": 749984,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-749984.onnx",
                "reward": 25.181818181818183,
                "creation_time": 1673781539.9245515,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-749984.pt"
                ]
            },
            {
                "steps": 799944,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-799944.onnx",
                "reward": 26.272727272727273,
                "creation_time": 1673781669.3677254,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-799944.pt"
                ]
            },
            {
                "steps": 849968,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-849968.onnx",
                "reward": 27.363636363636363,
                "creation_time": 1673781801.3550549,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-849968.pt"
                ]
            },
            {
                "steps": 899992,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-899992.onnx",
                "reward": 26.90909090909091,
                "creation_time": 1673781931.7085443,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-899992.pt"
                ]
            },
            {
                "steps": 949992,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-949992.onnx",
                "reward": 28.0,
                "creation_time": 1673782060.437772,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-949992.pt"
                ]
            },
            {
                "steps": 999952,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-999952.onnx",
                "reward": 26.545454545454547,
                "creation_time": 1673782189.4641516,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-999952.pt"
                ]
            },
            {
                "steps": 1000208,
                "file_path": "results/SnowballTarget1/SnowballTarget/SnowballTarget-1000208.onnx",
                "reward": 26.545454545454547,
                "creation_time": 1673782189.6510148,
                "auxillary_file_paths": [
                    "results/SnowballTarget1/SnowballTarget/SnowballTarget-1000208.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 1000208,
            "file_path": "results/SnowballTarget1/SnowballTarget.onnx",
            "reward": 26.545454545454547,
            "creation_time": 1673782189.6510148,
            "auxillary_file_paths": [
                "results/SnowballTarget1/SnowballTarget/SnowballTarget-1000208.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
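
The JSON above records one entry per saved policy checkpoint for the SnowballTarget behavior: steps trained, exported .onnx path, mean reward at save time, creation timestamp, and auxiliary .pt paths (the key is spelled "auxillary_file_paths" in the ML-Agents format). A minimal sketch of reading it, assuming a local copy named training_status.json; this script is illustrative and not part of the ML-Agents toolkit:

import json

# Load the training status file (assumed to be in the working directory).
with open("training_status.json") as f:
    status = json.load(f)

checkpoints = status["SnowballTarget"]["checkpoints"]

# Pick the checkpoint with the highest recorded mean reward.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"Best reward {best['reward']:.2f} at step {best['steps']}: {best['file_path']}")

# The final exported policy is recorded separately under "final_checkpoint".
final = status["SnowballTarget"]["final_checkpoint"]
print(f"Final checkpoint at step {final['steps']}: {final['file_path']}")

For this run, the sketch would report the 649,960-step or 949,992-step checkpoint (reward 28.0) as the best, while the final exported policy at 1,000,208 steps has a mean reward of about 26.5.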