ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199762,
                "file_path": "results/Huggy2/Huggy/Huggy-199762.onnx",
                "reward": 3.6219206058372886,
                "creation_time": 1724349832.3428004,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199762.pt"
                ]
            },
            {
                "steps": 399665,
                "file_path": "results/Huggy2/Huggy/Huggy-399665.onnx",
                "reward": 3.931346505435545,
                "creation_time": 1724349959.0298657,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399665.pt"
                ]
            },
            {
                "steps": 599890,
                "file_path": "results/Huggy2/Huggy/Huggy-599890.onnx",
                "reward": 3.857532897899891,
                "creation_time": 1724350086.1223586,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599890.pt"
                ]
            },
            {
                "steps": 799976,
                "file_path": "results/Huggy2/Huggy/Huggy-799976.onnx",
                "reward": 4.1084557663310655,
                "creation_time": 1724350216.1711872,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799976.pt"
                ]
            },
            {
                "steps": 999947,
                "file_path": "results/Huggy2/Huggy/Huggy-999947.onnx",
                "reward": 3.685474814640151,
                "creation_time": 1724350349.488367,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999947.pt"
                ]
            },
            {
                "steps": 1199958,
                "file_path": "results/Huggy2/Huggy/Huggy-1199958.onnx",
                "reward": 3.895549933115641,
                "creation_time": 1724350482.428126,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199958.pt"
                ]
            },
            {
                "steps": 1399920,
                "file_path": "results/Huggy2/Huggy/Huggy-1399920.onnx",
                "reward": 3.2277625799179077,
                "creation_time": 1724350615.8865,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399920.pt"
                ]
            },
            {
                "steps": 1599956,
                "file_path": "results/Huggy2/Huggy/Huggy-1599956.onnx",
                "reward": 3.9330031605143296,
                "creation_time": 1724350749.22875,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599956.pt"
                ]
            },
            {
                "steps": 1799954,
                "file_path": "results/Huggy2/Huggy/Huggy-1799954.onnx",
                "reward": 4.081536713710501,
                "creation_time": 1724350880.9714763,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799954.pt"
                ]
            },
            {
                "steps": 1999961,
                "file_path": "results/Huggy2/Huggy/Huggy-1999961.onnx",
                "reward": 4.086140624605692,
                "creation_time": 1724351011.687303,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999961.pt"
                ]
            },
            {
                "steps": 2000071,
                "file_path": "results/Huggy2/Huggy/Huggy-2000071.onnx",
                "reward": 4.113011433268493,
                "creation_time": 1724351011.7698567,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000071.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000071,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 4.113011433268493,
            "creation_time": 1724351011.7698567,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000071.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.4.0+cu121"
    }
}