{
"Huggy": {
"checkpoints": [
{
"steps": 199808,
"file_path": "results/Huggy/Huggy/Huggy-199808.onnx",
"reward": 3.522390412750529,
"creation_time": 1692144074.8715613,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199808.pt"
]
},
{
"steps": 399953,
"file_path": "results/Huggy/Huggy/Huggy-399953.onnx",
"reward": 3.67569022508044,
"creation_time": 1692144317.1146305,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399953.pt"
]
},
{
"steps": 599891,
"file_path": "results/Huggy/Huggy/Huggy-599891.onnx",
"reward": 4.223839841783047,
"creation_time": 1692144564.963386,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599891.pt"
]
},
{
"steps": 799969,
"file_path": "results/Huggy/Huggy/Huggy-799969.onnx",
"reward": 3.7950513129028978,
"creation_time": 1692144808.5867524,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799969.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
"reward": 4.101836080675001,
"creation_time": 1692145056.5340574,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199944,
"file_path": "results/Huggy/Huggy/Huggy-1199944.onnx",
"reward": 3.719369573028464,
"creation_time": 1692145300.2911491,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199944.pt"
]
},
{
"steps": 1399947,
"file_path": "results/Huggy/Huggy/Huggy-1399947.onnx",
"reward": 4.193054298559825,
"creation_time": 1692145544.8174322,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399947.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy/Huggy/Huggy-1599953.onnx",
"reward": 3.7308883680934795,
"creation_time": 1692145790.2088013,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799960,
"file_path": "results/Huggy/Huggy/Huggy-1799960.onnx",
"reward": 3.8916685795172667,
"creation_time": 1692146040.9201603,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799960.pt"
]
},
{
"steps": 1999989,
"file_path": "results/Huggy/Huggy/Huggy-1999989.onnx",
"reward": 4.075259197216767,
"creation_time": 1692146289.8845987,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999989.pt"
]
},
{
"steps": 2000068,
"file_path": "results/Huggy/Huggy/Huggy-2000068.onnx",
"reward": 4.064561639191969,
"creation_time": 1692146290.0132363,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000068.pt"
]
}
],
"final_checkpoint": {
"steps": 2000068,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.064561639191969,
"creation_time": 1692146290.0132363,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000068.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}