ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199850,
                "file_path": "results/Huggy/Huggy/Huggy-199850.onnx",
                "reward": 3.3558412392934165,
                "creation_time": 1688910592.9710023,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199850.pt"
                ]
            },
            {
                "steps": 399827,
                "file_path": "results/Huggy/Huggy/Huggy-399827.onnx",
                "reward": 3.878401592373848,
                "creation_time": 1688910850.5460165,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399827.pt"
                ]
            },
            {
                "steps": 599997,
                "file_path": "results/Huggy/Huggy/Huggy-599997.onnx",
                "reward": 4.168707685811179,
                "creation_time": 1688911112.2218719,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599997.pt"
                ]
            },
            {
                "steps": 799904,
                "file_path": "results/Huggy/Huggy/Huggy-799904.onnx",
                "reward": 3.8597427490037246,
                "creation_time": 1688911373.3765192,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799904.pt"
                ]
            },
            {
                "steps": 999881,
                "file_path": "results/Huggy/Huggy/Huggy-999881.onnx",
                "reward": 3.7480789804831147,
                "creation_time": 1688911633.89501,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999881.pt"
                ]
            },
            {
                "steps": 1199969,
                "file_path": "results/Huggy/Huggy/Huggy-1199969.onnx",
                "reward": 4.059000778198242,
                "creation_time": 1688911899.6578145,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199969.pt"
                ]
            },
            {
                "steps": 1399914,
                "file_path": "results/Huggy/Huggy/Huggy-1399914.onnx",
                "reward": 3.910499219921814,
                "creation_time": 1688912158.2981546,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399914.pt"
                ]
            },
            {
                "steps": 1599960,
                "file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
                "reward": 3.7320871225202272,
                "creation_time": 1688912424.7189038,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599960.pt"
                ]
            },
            {
                "steps": 1799986,
                "file_path": "results/Huggy/Huggy/Huggy-1799986.onnx",
                "reward": 3.8471788058155463,
                "creation_time": 1688912690.1475546,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799986.pt"
                ]
            },
            {
                "steps": 1999950,
                "file_path": "results/Huggy/Huggy/Huggy-1999950.onnx",
                "reward": 3.5999779962911838,
                "creation_time": 1688912956.780749,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999950.pt"
                ]
            },
            {
                "steps": 2000020,
                "file_path": "results/Huggy/Huggy/Huggy-2000020.onnx",
                "reward": 3.5996889982904707,
                "creation_time": 1688912956.9209201,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000020.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000020,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5996889982904707,
            "creation_time": 1688912956.9209201,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000020.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
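
For reference, a minimal sketch (Python, standard library only) of how this status file can be read back; it assumes the file sits at run_logs/training_status.json relative to the working directory and that the behavior key is "Huggy", both taken from the listing above:

import json

# Load the ML-Agents training status file (path assumed from the listing above).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]

# Find the checkpoint with the highest mean reward.
# Note: "auxillary_file_paths" is the key exactly as ML-Agents writes it.
best = max(behavior["checkpoints"], key=lambda ckpt: ckpt["reward"])
print(f"best checkpoint: {best['file_path']} "
      f"(steps={best['steps']}, reward={best['reward']:.3f})")

# The final exported policy and the framework versions used during training.
final = behavior["final_checkpoint"]
print(f"final model: {final['file_path']} (reward={final['reward']:.3f})")
print("mlagents version:", status["metadata"]["mlagents_version"])

On the data above this reports Huggy-599997.onnx as the best checkpoint (reward ≈ 4.17), while the final exported model at step 2000020 sits at ≈ 3.60.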