{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199697,
        "file_path": "results/Huggy/Huggy/Huggy-199697.onnx",
        "reward": 3.5679496992379427,
        "creation_time": 1692217250.743937,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199697.pt"
        ]
      },
      {
        "steps": 399916,
        "file_path": "results/Huggy/Huggy/Huggy-399916.onnx",
        "reward": 3.5450866792131874,
        "creation_time": 1692217527.6098602,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399916.pt"
        ]
      },
      {
        "steps": 599970,
        "file_path": "results/Huggy/Huggy/Huggy-599970.onnx",
        "reward": 3.555763299266497,
        "creation_time": 1692217804.6638231,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599970.pt"
        ]
      },
      {
        "steps": 799927,
        "file_path": "results/Huggy/Huggy/Huggy-799927.onnx",
        "reward": 3.818216373806908,
        "creation_time": 1692218077.9152787,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799927.pt"
        ]
      },
      {
        "steps": 999650,
        "file_path": "results/Huggy/Huggy/Huggy-999650.onnx",
        "reward": 3.6712331642098976,
        "creation_time": 1692218359.6830852,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999650.pt"
        ]
      },
      {
        "steps": 1199974,
        "file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
        "reward": 4.032500128413355,
        "creation_time": 1692218634.777689,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199974.pt"
        ]
      },
      {
        "steps": 1399984,
        "file_path": "results/Huggy/Huggy/Huggy-1399984.onnx",
        "reward": 4.0439106605269695,
        "creation_time": 1692218913.680043,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399984.pt"
        ]
      },
      {
        "steps": 1599965,
        "file_path": "results/Huggy/Huggy/Huggy-1599965.onnx",
        "reward": 3.787230677560929,
        "creation_time": 1692219193.9473193,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599965.pt"
        ]
      },
      {
        "steps": 1799911,
        "file_path": "results/Huggy/Huggy/Huggy-1799911.onnx",
        "reward": 3.8454588560657648,
        "creation_time": 1692219484.1186554,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799911.pt"
        ]
      },
      {
        "steps": 1999964,
        "file_path": "results/Huggy/Huggy/Huggy-1999964.onnx",
        "reward": 3.301635592670764,
        "creation_time": 1692219775.163652,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999964.pt"
        ]
      },
      {
        "steps": 2000034,
        "file_path": "results/Huggy/Huggy/Huggy-2000034.onnx",
        "reward": 3.3193597356478373,
        "creation_time": 1692219775.2970736,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000034.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000034,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.3193597356478373,
      "creation_time": 1692219775.2970736,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000034.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}