ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5275444388389587,
"min": 0.5229937434196472,
"max": 1.4919500350952148,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 15936.0615234375,
"min": 15882.2734375,
"max": 45259.796875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989999.0,
"min": 29952.0,
"max": 989999.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989999.0,
"min": 29952.0,
"max": 989999.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.24824641644954681,
"min": -0.11056908220052719,
"max": 0.2682746946811676,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 64.29582214355469,
"min": -26.536579132080078,
"max": 69.75141906738281,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.10493466258049011,
"min": -0.07974647730588913,
"max": 0.2803550064563751,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 27.178077697753906,
"min": -20.73408317565918,
"max": 67.28520202636719,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0689414441035462,
"min": 0.06591482251279097,
"max": 0.07353983002362968,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9651802174496468,
"min": 0.46370742161550504,
"max": 1.0899152256606612,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015097684056159136,
"min": 8.922779325853028e-05,
"max": 0.015097684056159136,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21136757678622792,
"min": 0.000981505725843833,
"max": 0.21136757678622792,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.250433297507145e-06,
"min": 7.250433297507145e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010150606616510002,
"min": 0.00010150606616510002,
"max": 0.0028984757338415,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10241677857142858,
"min": 0.10241677857142858,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4338349000000001,
"min": 1.3691136000000002,
"max": 2.2750194,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002514361792857143,
"min": 0.0002514361792857143,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035201065100000004,
"min": 0.0035201065100000004,
"max": 0.09663923414999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009967753663659096,
"min": 0.00953847449272871,
"max": 0.44266510009765625,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13954855501651764,
"min": 0.13579756021499634,
"max": 3.0986557006835938,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 569.0392156862745,
"min": 552.1551724137931,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29021.0,
"min": 15984.0,
"max": 32430.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.1563685945436066,
"min": -1.0000000521540642,
"max": 1.1563685945436066,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 58.97479832172394,
"min": -32.000001668930054,
"max": 61.969598077237606,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.1563685945436066,
"min": -1.0000000521540642,
"max": 1.1563685945436066,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 58.97479832172394,
"min": -32.000001668930054,
"max": 61.969598077237606,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.05858599430711383,
"min": 0.05858599430711383,
"max": 9.149884779006243,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.9878857096628053,
"min": 2.879139439028222,
"max": 146.39815646409988,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1714117409",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1714120362"
},
"total": 2953.092644154,
"count": 1,
"self": 0.6416723239999556,
"children": {
"run_training.setup": {
"total": 0.07038534899993465,
"count": 1,
"self": 0.07038534899993465
},
"TrainerController.start_learning": {
"total": 2952.380586481,
"count": 1,
"self": 2.1844519400378886,
"children": {
"TrainerController._reset_env": {
"total": 2.758355896000012,
"count": 1,
"self": 2.758355896000012
},
"TrainerController.advance": {
"total": 2947.350259628962,
"count": 63199,
"self": 2.49156530700202,
"children": {
"env_step": {
"total": 1909.8899998869565,
"count": 63199,
"self": 1752.621921738913,
"children": {
"SubprocessEnvManager._take_step": {
"total": 155.85631226405394,
"count": 63199,
"self": 6.920539922923126,
"children": {
"TorchPolicy.evaluate": {
"total": 148.93577234113081,
"count": 62556,
"self": 148.93577234113081
}
}
},
"workers": {
"total": 1.411765883989574,
"count": 63199,
"self": 0.0,
"children": {
"worker_root": {
"total": 2945.3910633169517,
"count": 63199,
"is_parallel": true,
"self": 1373.9750859968635,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.004433328000231995,
"count": 1,
"is_parallel": true,
"self": 0.0012978209997527301,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.003135507000479265,
"count": 8,
"is_parallel": true,
"self": 0.003135507000479265
}
}
},
"UnityEnvironment.step": {
"total": 0.06658186599997862,
"count": 1,
"is_parallel": true,
"self": 0.0008725630000299134,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005565849996855832,
"count": 1,
"is_parallel": true,
"self": 0.0005565849996855832
},
"communicator.exchange": {
"total": 0.0628538330001902,
"count": 1,
"is_parallel": true,
"self": 0.0628538330001902
},
"steps_from_proto": {
"total": 0.0022988850000729144,
"count": 1,
"is_parallel": true,
"self": 0.0005538640011764073,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001745020998896507,
"count": 8,
"is_parallel": true,
"self": 0.001745020998896507
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1571.4159773200881,
"count": 63198,
"is_parallel": true,
"self": 50.64220372603086,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 32.037649113050065,
"count": 63198,
"is_parallel": true,
"self": 32.037649113050065
},
"communicator.exchange": {
"total": 1358.2502736570063,
"count": 63198,
"is_parallel": true,
"self": 1358.2502736570063
},
"steps_from_proto": {
"total": 130.48585082400086,
"count": 63198,
"is_parallel": true,
"self": 27.809712724718793,
"children": {
"_process_rank_one_or_two_observation": {
"total": 102.67613809928207,
"count": 505584,
"is_parallel": true,
"self": 102.67613809928207
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1034.9686944350033,
"count": 63199,
"self": 3.9485818820444365,
"children": {
"process_trajectory": {
"total": 161.0610769889604,
"count": 63199,
"self": 160.8682627499611,
"children": {
"RLTrainer._checkpoint": {
"total": 0.19281423899928996,
"count": 2,
"self": 0.19281423899928996
}
}
},
"_update_policy": {
"total": 869.9590355639984,
"count": 424,
"self": 355.27909520389085,
"children": {
"TorchPPOOptimizer.update": {
"total": 514.6799403601076,
"count": 22920,
"self": 514.6799403601076
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.129998943535611e-07,
"count": 1,
"self": 9.129998943535611e-07
},
"TrainerController._save_models": {
"total": 0.08751810300054785,
"count": 1,
"self": 0.0022383310006262036,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08527977199992165,
"count": 1,
"self": 0.08527977199992165
}
}
}
}
}
}
}
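
The file above has two parts: a "gauges" block of per-metric summaries (each with value, min, max, count) and a nested timer tree where every node records "total" seconds, an invocation "count", "self" time excluding children, and an optional "children" map. Below is a minimal sketch, not part of the original run, showing one way to load and inspect such a log; the relative path "run_logs/timers.json" is an assumption based on this repo's layout.

```python
import json

# Assumed location of the log within this repository.
with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Print a couple of the recorded gauges (value, min, max, count).
gauges = timers["gauges"]
for key in ("Pyramids.Environment.CumulativeReward.mean",
            "Pyramids.Policy.Entropy.mean"):
    g = gauges[key]
    print(f"{key}: value={g['value']:.4f} "
          f"(min={g['min']:.4f}, max={g['max']:.4f}, n={g['count']})")

# Walk the timer tree: each node has "total" (seconds), "count" (calls),
# "self" (time excluding children) and an optional "children" dict.
def walk(name, node, depth=0):
    total = node.get("total", 0.0)
    count = node.get("count", 0)
    print(f"{'  ' * depth}{name}: {total:.1f}s over {count} call(s)")
    for child_name, child in node.get("children", {}).items():
        walk(child_name, child, depth + 1)

walk("root", timers)
```

Run from the repository root, this prints the final mean cumulative reward and policy entropy, then an indented breakdown of where the ~2953 s of wall-clock time went (env_step vs. trainer_advance and their children).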