{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199729,
                "file_path": "results/Huggy/Huggy/Huggy-199729.onnx",
                "reward": 3.48253853356137,
                "creation_time": 1673372959.6009884,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199729.pt"
                ]
            },
            {
                "steps": 399920,
                "file_path": "results/Huggy/Huggy/Huggy-399920.onnx",
                "reward": 4.087349639422651,
                "creation_time": 1673373191.8417654,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399920.pt"
                ]
            },
            {
                "steps": 599933,
                "file_path": "results/Huggy/Huggy/Huggy-599933.onnx",
                "reward": 4.08766138837451,
                "creation_time": 1673373446.2274654,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599933.pt"
                ]
            },
            {
                "steps": 799936,
                "file_path": "results/Huggy/Huggy/Huggy-799936.onnx",
                "reward": 3.960768826513983,
                "creation_time": 1673373701.951069,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799936.pt"
                ]
            },
            {
                "steps": 999932,
                "file_path": "results/Huggy/Huggy/Huggy-999932.onnx",
                "reward": 3.669346879146717,
                "creation_time": 1673373961.3153243,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999932.pt"
                ]
            },
            {
                "steps": 1199878,
                "file_path": "results/Huggy/Huggy/Huggy-1199878.onnx",
                "reward": 3.759998026821348,
                "creation_time": 1673374225.3697937,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199878.pt"
                ]
            },
            {
                "steps": 1399977,
                "file_path": "results/Huggy/Huggy/Huggy-1399977.onnx",
                "reward": 3.4316719429833547,
                "creation_time": 1673374481.1905448,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399977.pt"
                ]
            },
            {
                "steps": 1599923,
                "file_path": "results/Huggy/Huggy/Huggy-1599923.onnx",
                "reward": 3.8901916621911403,
                "creation_time": 1673374726.048887,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599923.pt"
                ]
            },
            {
                "steps": 1799560,
                "file_path": "results/Huggy/Huggy/Huggy-1799560.onnx",
                "reward": 4.172338195083555,
                "creation_time": 1673374990.9821188,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799560.pt"
                ]
            },
            {
                "steps": 1999977,
                "file_path": "results/Huggy/Huggy/Huggy-1999977.onnx",
                "reward": 3.7212144700256555,
                "creation_time": 1673375257.027856,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999977.pt"
                ]
            },
            {
                "steps": 2000025,
                "file_path": "results/Huggy/Huggy/Huggy-2000025.onnx",
                "reward": 3.6990382146835326,
                "creation_time": 1673375257.166602,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000025.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000025,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.6990382146835326,
            "creation_time": 1673375257.166602,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000025.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}