ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199756,
                "file_path": "results/Huggy/Huggy/Huggy-199756.onnx",
                "reward": 3.324016609125667,
                "creation_time": 1688059034.643485,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199756.pt"
                ]
            },
            {
                "steps": 399904,
                "file_path": "results/Huggy/Huggy/Huggy-399904.onnx",
                "reward": 3.9327769924382694,
                "creation_time": 1688059269.9884217,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399904.pt"
                ]
            },
            {
                "steps": 599987,
                "file_path": "results/Huggy/Huggy/Huggy-599987.onnx",
                "reward": 4.478786131914924,
                "creation_time": 1688059509.5921288,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599987.pt"
                ]
            },
            {
                "steps": 799931,
                "file_path": "results/Huggy/Huggy/Huggy-799931.onnx",
                "reward": 3.9843229971613203,
                "creation_time": 1688059752.6541917,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799931.pt"
                ]
            },
            {
                "steps": 999955,
                "file_path": "results/Huggy/Huggy/Huggy-999955.onnx",
                "reward": 3.7612709650548837,
                "creation_time": 1688059993.0037107,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999955.pt"
                ]
            },
            {
                "steps": 1199994,
                "file_path": "results/Huggy/Huggy/Huggy-1199994.onnx",
                "reward": 3.7183290467423906,
                "creation_time": 1688060236.307065,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199994.pt"
                ]
            },
            {
                "steps": 1399631,
                "file_path": "results/Huggy/Huggy/Huggy-1399631.onnx",
                "reward": 3.7897839216085583,
                "creation_time": 1688060476.8624885,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399631.pt"
                ]
            },
            {
                "steps": 1599472,
                "file_path": "results/Huggy/Huggy/Huggy-1599472.onnx",
                "reward": 3.531228153983508,
                "creation_time": 1688060725.7041714,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599472.pt"
                ]
            },
            {
                "steps": 1799943,
                "file_path": "results/Huggy/Huggy/Huggy-1799943.onnx",
                "reward": 3.7880370605339126,
                "creation_time": 1688060971.8997488,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799943.pt"
                ]
            },
            {
                "steps": 1999973,
                "file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
                "reward": 3.987368631362915,
                "creation_time": 1688061220.8907142,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999973.pt"
                ]
            },
            {
                "steps": 2000021,
                "file_path": "results/Huggy/Huggy/Huggy-2000021.onnx",
                "reward": 3.9310972459854616,
                "creation_time": 1688061221.0180843,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000021.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000021,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.9310972459854616,
            "creation_time": 1688061221.0180843,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000021.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
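The checkpoint list above is plain JSON, so it can be inspected without any ML-Agents tooling. Below is a minimal sketch that loads the file and summarizes the run; it assumes the file sits at `run_logs/training_status.json` relative to the working directory (matching the repo path in the title), and uses only the keys visible in the log. Note that `auxillary_file_paths` is the literal (misspelled) key the log uses, so it must be read as-is.

```python
import json

# Load the training status log (path assumed from the repo layout above).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]
checkpoints = behavior["checkpoints"]

# Print each checkpoint's step count and mean reward.
for ckpt in checkpoints:
    print(f"step {ckpt['steps']:>9,}  reward {ckpt['reward']:.3f}")

# Checkpoint with the highest mean reward
# (here Huggy-599987.onnx, reward ~4.48).
best = max(checkpoints, key=lambda c: c["reward"])
print("best checkpoint:", best["file_path"], "reward", round(best["reward"], 3))

# The exported final model is listed separately.
final = behavior["final_checkpoint"]
print("final model:", final["file_path"], "at step", final["steps"])
```

Reading the data this way makes the run's shape clear: the reward climbs from about 3.32 at 200k steps to a peak of about 4.48 at 600k steps, then settles around 3.5 to 4.0 for the rest of the 2M-step run, with the final exported model (`results/Huggy/Huggy.onnx`) saved at step 2,000,021.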