ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199944,
"file_path": "results/Huggy/Huggy/Huggy-199944.onnx",
"reward": 3.2485241940323735,
"creation_time": 1689176443.2812536,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199944.pt"
]
},
{
"steps": 399971,
"file_path": "results/Huggy/Huggy/Huggy-399971.onnx",
"reward": 3.860613364726305,
"creation_time": 1689176689.7446656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399971.pt"
]
},
{
"steps": 599995,
"file_path": "results/Huggy/Huggy/Huggy-599995.onnx",
"reward": 4.193243607230809,
"creation_time": 1689176937.499055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599995.pt"
]
},
{
"steps": 799771,
"file_path": "results/Huggy/Huggy/Huggy-799771.onnx",
"reward": 3.8672857138999674,
"creation_time": 1689177179.8761756,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799771.pt"
]
},
{
"steps": 999957,
"file_path": "results/Huggy/Huggy/Huggy-999957.onnx",
"reward": 3.737516005296965,
"creation_time": 1689177433.2471008,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999957.pt"
]
},
{
"steps": 1199453,
"file_path": "results/Huggy/Huggy/Huggy-1199453.onnx",
"reward": 3.3601031765645866,
"creation_time": 1689177681.9332788,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199453.pt"
]
},
{
"steps": 1399944,
"file_path": "results/Huggy/Huggy/Huggy-1399944.onnx",
"reward": 3.8833486840531632,
"creation_time": 1689177923.5344253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399944.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy/Huggy/Huggy-1599982.onnx",
"reward": 3.7849275279045105,
"creation_time": 1689178168.8804603,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799946,
"file_path": "results/Huggy/Huggy/Huggy-1799946.onnx",
"reward": 3.8843780664765104,
"creation_time": 1689178415.102903,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799946.pt"
]
},
{
"steps": 1999969,
"file_path": "results/Huggy/Huggy/Huggy-1999969.onnx",
"reward": 3.3687322700724884,
"creation_time": 1689178674.8693898,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999969.pt"
]
},
{
"steps": 2000014,
"file_path": "results/Huggy/Huggy/Huggy-2000014.onnx",
"reward": 3.328121703011649,
"creation_time": 1689178674.999112,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
],
"final_checkpoint": {
"steps": 2000014,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.328121703011649,
"creation_time": 1689178674.999112,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000014.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
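
For reference, this is the checkpoint log that ML-Agents (here version 0.31.0.dev0, per the metadata block) writes during training. Below is a minimal Python sketch showing how the data above could be read back to summarize the run; it assumes the file sits at `run_logs/training_status.json` relative to the working directory, and the `"Huggy"` key is the behavior name taken from this run. It uses only the standard library.

```python
# Minimal sketch: summarize the checkpoints recorded in training_status.json.
# Assumes the file is saved at run_logs/training_status.json (path is an
# assumption based on this repo's layout) and that the behavior name key
# is "Huggy", as in the file above.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

# List every intermediate checkpoint with its step count and mean reward.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'step {ckpt["steps"]:>9,}  reward {ckpt["reward"]:.3f}  -> {ckpt["file_path"]}')

# The final checkpoint is recorded separately and points at the exported
# top-level .onnx file.
final = status["Huggy"]["final_checkpoint"]
print(f'final: step {final["steps"]:,}  reward {final["reward"]:.3f}  -> {final["file_path"]}')
```

Run against the file above, this would print one line per checkpoint (steps 199,944 through 2,000,014) followed by the final checkpoint at `results/Huggy/Huggy.onnx` with reward 3.328.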