{ "Huggy": { "checkpoints": [ { "steps": 20218, "file_path": "results/Huggy/Huggy/Huggy-20218.onnx", "reward": 1.5803483767168862, "creation_time": 1687944634.9247677, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-20218.pt" ] } ], "final_checkpoint": { "steps": 20218, "file_path": "results/Huggy/Huggy.onnx", "reward": 1.5803483767168862, "creation_time": 1687944634.9247677, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-20218.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.31.0.dev0", "torch_version": "1.11.0+cu102" } }