ppo-Huggy / run_logs / training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199969,
"file_path": "results/Huggy/Huggy/Huggy-199969.onnx",
"reward": 3.402809262275696,
"creation_time": 1692554579.8386805,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199969.pt"
]
},
{
"steps": 399927,
"file_path": "results/Huggy/Huggy/Huggy-399927.onnx",
"reward": 3.868685157931581,
"creation_time": 1692554815.8660645,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399927.pt"
]
},
{
"steps": 599948,
"file_path": "results/Huggy/Huggy/Huggy-599948.onnx",
"reward": 3.6043722867965697,
"creation_time": 1692555057.110299,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599948.pt"
]
},
{
"steps": 799909,
"file_path": "results/Huggy/Huggy/Huggy-799909.onnx",
"reward": 3.643406583650692,
"creation_time": 1692555296.7036667,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799909.pt"
]
},
{
"steps": 999938,
"file_path": "results/Huggy/Huggy/Huggy-999938.onnx",
"reward": 3.531369671993649,
"creation_time": 1692555543.540853,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999938.pt"
]
},
{
"steps": 1199975,
"file_path": "results/Huggy/Huggy/Huggy-1199975.onnx",
"reward": 3.6153030395507812,
"creation_time": 1692555786.9512753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199975.pt"
]
},
{
"steps": 1399925,
"file_path": "results/Huggy/Huggy/Huggy-1399925.onnx",
"reward": 3.816104122470407,
"creation_time": 1692556028.0654056,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399925.pt"
]
},
{
"steps": 1599903,
"file_path": "results/Huggy/Huggy/Huggy-1599903.onnx",
"reward": 3.6843670152700865,
"creation_time": 1692556270.9572763,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599903.pt"
]
},
{
"steps": 1799980,
"file_path": "results/Huggy/Huggy/Huggy-1799980.onnx",
"reward": 3.3616843840171553,
"creation_time": 1692556515.4958181,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799980.pt"
]
},
{
"steps": 1999875,
"file_path": "results/Huggy/Huggy/Huggy-1999875.onnx",
"reward": 3.5979459851428315,
"creation_time": 1692556754.7932284,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999875.pt"
]
},
{
"steps": 2000625,
"file_path": "results/Huggy/Huggy/Huggy-2000625.onnx",
"reward": 3.5694662758282254,
"creation_time": 1692556754.9440346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000625.pt"
]
}
],
"final_checkpoint": {
"steps": 2000625,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5694662758282254,
"creation_time": 1692556754.9440346,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000625.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}
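
For reference, a minimal Python sketch of how this log might be inspected, assuming the file above is saved locally as training_status.json (the filename and local path are assumptions, not part of the log itself). It prints each checkpoint's step count, reward, and creation time, then the final checkpoint. Note that "auxillary_file_paths" is the key as ML-Agents writes it, so it is read verbatim here.

import json
from datetime import datetime, timezone

# Load the ML-Agents training status log (local path is an assumption; adjust as needed).
with open("training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]

# Walk the periodic checkpoints in the order they were written.
for ckpt in huggy["checkpoints"]:
    # creation_time is a Unix timestamp in seconds; render it as UTC.
    created = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f"step {ckpt['steps']:>8}  reward {ckpt['reward']:.3f}  "
          f"saved {created:%Y-%m-%d %H:%M:%S} UTC  -> {ckpt['file_path']}")

# The final checkpoint points at the exported results/Huggy/Huggy.onnx model.
final = huggy["final_checkpoint"]
print(f"final: {final['file_path']} (reward {final['reward']:.3f})")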