ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199971,
                "file_path": "results/Huggy/Huggy/Huggy-199971.onnx",
                "reward": 3.2829070049272455,
                "creation_time": 1690208402.5959156,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199971.pt"
                ]
            },
            {
                "steps": 399983,
                "file_path": "results/Huggy/Huggy/Huggy-399983.onnx",
                "reward": 3.7319820578281697,
                "creation_time": 1690208651.664794,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399983.pt"
                ]
            },
            {
                "steps": 599928,
                "file_path": "results/Huggy/Huggy/Huggy-599928.onnx",
                "reward": 3.9818235874176025,
                "creation_time": 1690208905.5397792,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599928.pt"
                ]
            },
            {
                "steps": 799991,
                "file_path": "results/Huggy/Huggy/Huggy-799991.onnx",
                "reward": 3.9852652667742694,
                "creation_time": 1690209156.511791,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799991.pt"
                ]
            },
            {
                "steps": 999987,
                "file_path": "results/Huggy/Huggy/Huggy-999987.onnx",
                "reward": 4.236205971240997,
                "creation_time": 1690209424.0614781,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999987.pt"
                ]
            },
            {
                "steps": 1199904,
                "file_path": "results/Huggy/Huggy/Huggy-1199904.onnx",
                "reward": 3.6461187176508445,
                "creation_time": 1690209691.583076,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199904.pt"
                ]
            },
            {
                "steps": 1399971,
                "file_path": "results/Huggy/Huggy/Huggy-1399971.onnx",
                "reward": 3.3854453682899477,
                "creation_time": 1690209953.8785791,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399971.pt"
                ]
            },
            {
                "steps": 1599924,
                "file_path": "results/Huggy/Huggy/Huggy-1599924.onnx",
                "reward": 3.9394075894003433,
                "creation_time": 1690210207.606643,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599924.pt"
                ]
            },
            {
                "steps": 1799257,
                "file_path": "results/Huggy/Huggy/Huggy-1799257.onnx",
                "reward": 3.918845490544243,
                "creation_time": 1690210464.8057234,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799257.pt"
                ]
            },
            {
                "steps": 1999453,
                "file_path": "results/Huggy/Huggy/Huggy-1999453.onnx",
                "reward": 3.7157791820732324,
                "creation_time": 1690210722.5405786,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999453.pt"
                ]
            },
            {
                "steps": 2000203,
                "file_path": "results/Huggy/Huggy/Huggy-2000203.onnx",
                "reward": 3.5057103194688497,
                "creation_time": 1690210722.7877994,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000203.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000203,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.5057103194688497,
            "creation_time": 1690210722.7877994,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000203.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
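
For readers consuming this log programmatically: a minimal sketch of loading the checkpoint history with Python's standard library and reporting the best-scoring checkpoint. The local path `training_status.json` is an assumption (download the file from the repo first); this helper is illustrative and not part of ML-Agents itself. Note the key `auxillary_file_paths` is spelled exactly as ML-Agents writes it.

```python
import json
from datetime import datetime, timezone

# Load the ML-Agents training status log (local path is an assumption).
with open("training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Each checkpoint records the mean reward at save time; pick the highest.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best checkpoint: {best['file_path']}")
print(f"  steps:   {best['steps']}")
print(f"  reward:  {best['reward']:.3f}")

# creation_time is a Unix timestamp in seconds.
created = datetime.fromtimestamp(best["creation_time"], tz=timezone.utc)
print(f"  created: {created.isoformat()}")
```

On this log, that prints the 999987-step checkpoint (mean reward ≈ 4.236), whereas the exported `final_checkpoint` is simply the last one saved, not the best.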