ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.41078394651412964,
"min": 0.4095400869846344,
"max": 1.3696956634521484,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12251.220703125,
"min": 12220.67578125,
"max": 41551.0859375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989976.0,
"min": 29945.0,
"max": 989976.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989976.0,
"min": 29945.0,
"max": 989976.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.35433879494667053,
"min": -0.09399523586034775,
"max": 0.37853771448135376,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 92.83676147460938,
"min": -22.652851104736328,
"max": 99.555419921875,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.003528849920257926,
"min": -0.03828004375100136,
"max": 0.4169260263442993,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.924558699131012,
"min": -9.646571159362793,
"max": 98.81147003173828,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07130675445798608,
"min": 0.063502892873089,
"max": 0.0733289213507718,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.998294562411805,
"min": 0.5021237960530528,
"max": 1.0283784134565697,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.016220093043687354,
"min": 0.0014942152583671184,
"max": 0.017930829031480165,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22708130261162293,
"min": 0.016466219898067392,
"max": 0.2510316064407223,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.540833200707147e-06,
"min": 7.540833200707147e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010557166480990007,
"min": 0.00010557166480990007,
"max": 0.0036324283891905995,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10251357857142859,
"min": 0.10251357857142859,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4351901000000002,
"min": 1.3886848,
"max": 2.610809400000001,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002611064992857145,
"min": 0.0002611064992857145,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036554909900000025,
"min": 0.0036554909900000025,
"max": 0.12109985906000001,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012091363780200481,
"min": 0.012091363780200481,
"max": 0.4375329911708832,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1692790985107422,
"min": 0.1692790985107422,
"max": 3.0627310276031494,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 464.0983606557377,
"min": 453.95588235294116,
"max": 992.4705882352941,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 28310.0,
"min": 16872.0,
"max": 32589.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.371921287208307,
"min": -0.9280968265187356,
"max": 1.4577852651915129,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 83.68719851970673,
"min": -30.619401656091213,
"max": 99.12939803302288,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.371921287208307,
"min": -0.9280968265187356,
"max": 1.4577852651915129,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 83.68719851970673,
"min": -30.619401656091213,
"max": 99.12939803302288,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.05894064438122813,
"min": 0.058526371393193534,
"max": 8.54708165456267,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.595379307254916,
"min": 3.595379307254916,
"max": 145.30038812756538,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1690549358",
"python_version": "3.10.6 (main, May 29 2023, 11:10:38) [GCC 11.3.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1690551676"
},
"total": 2317.955381923,
"count": 1,
"self": 0.7924951890004195,
"children": {
"run_training.setup": {
"total": 0.05377989099997649,
"count": 1,
"self": 0.05377989099997649
},
"TrainerController.start_learning": {
"total": 2317.1091068429996,
"count": 1,
"self": 1.598196981030469,
"children": {
"TrainerController._reset_env": {
"total": 4.691545566000059,
"count": 1,
"self": 4.691545566000059
},
"TrainerController.advance": {
"total": 2310.661099816969,
"count": 63567,
"self": 1.58558431892925,
"children": {
"env_step": {
"total": 1618.1647922400575,
"count": 63567,
"self": 1495.8991322170702,
"children": {
"SubprocessEnvManager._take_step": {
"total": 121.30193584504696,
"count": 63567,
"self": 5.259968947023026,
"children": {
"TorchPolicy.evaluate": {
"total": 116.04196689802393,
"count": 62552,
"self": 116.04196689802393
}
}
},
"workers": {
"total": 0.9637241779403212,
"count": 63567,
"self": 0.0,
"children": {
"worker_root": {
"total": 2311.181558830086,
"count": 63567,
"is_parallel": true,
"self": 944.0419506230501,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0027037289999043423,
"count": 1,
"is_parallel": true,
"self": 0.0008107259995995264,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001893003000304816,
"count": 8,
"is_parallel": true,
"self": 0.001893003000304816
}
}
},
"UnityEnvironment.step": {
"total": 0.0493866120000348,
"count": 1,
"is_parallel": true,
"self": 0.0006584220002423535,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004785950000041339,
"count": 1,
"is_parallel": true,
"self": 0.0004785950000041339
},
"communicator.exchange": {
"total": 0.04646213999990323,
"count": 1,
"is_parallel": true,
"self": 0.04646213999990323
},
"steps_from_proto": {
"total": 0.0017874549998850853,
"count": 1,
"is_parallel": true,
"self": 0.00036026699945068685,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014271880004343984,
"count": 8,
"is_parallel": true,
"self": 0.0014271880004343984
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1367.139608207036,
"count": 63566,
"is_parallel": true,
"self": 36.90651828203204,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.64739327100051,
"count": 63566,
"is_parallel": true,
"self": 25.64739327100051
},
"communicator.exchange": {
"total": 1190.1476048189675,
"count": 63566,
"is_parallel": true,
"self": 1190.1476048189675
},
"steps_from_proto": {
"total": 114.43809183503595,
"count": 63566,
"is_parallel": true,
"self": 23.783321238078088,
"children": {
"_process_rank_one_or_two_observation": {
"total": 90.65477059695786,
"count": 508528,
"is_parallel": true,
"self": 90.65477059695786
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 690.9107232579825,
"count": 63567,
"self": 2.9840141529971334,
"children": {
"process_trajectory": {
"total": 121.65125984698489,
"count": 63567,
"self": 121.35381263698491,
"children": {
"RLTrainer._checkpoint": {
"total": 0.297447209999973,
"count": 2,
"self": 0.297447209999973
}
}
},
"_update_policy": {
"total": 566.2754492580004,
"count": 454,
"self": 369.67502843899206,
"children": {
"TorchPPOOptimizer.update": {
"total": 196.60042081900838,
"count": 22788,
"self": 196.60042081900838
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.488000179961091e-06,
"count": 1,
"self": 1.488000179961091e-06
},
"TrainerController._save_models": {
"total": 0.15826299099990138,
"count": 1,
"self": 0.0020084130001123413,
"children": {
"RLTrainer._checkpoint": {
"total": 0.15625457799978903,
"count": 1,
"self": 0.15625457799978903
}
}
}
}
}
}
}