ppo-Pyramids-1 / run_logs / timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.732119619846344,
"min": 0.7186362743377686,
"max": 1.4985047578811646,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 21705.8828125,
"min": 21432.607421875,
"max": 45458.640625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989881.0,
"min": 29952.0,
"max": 989881.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989881.0,
"min": 29952.0,
"max": 989881.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.21934671700000763,
"min": -0.107887402176857,
"max": 0.24968639016151428,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 55.71406555175781,
"min": -25.892976760864258,
"max": 64.16940307617188,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.01428283378481865,
"min": -0.0028907437808811665,
"max": 0.1138405054807663,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 3.6278398036956787,
"min": -0.6851062774658203,
"max": 27.435562133789062,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06751152820721644,
"min": 0.06435350195145556,
"max": 0.07355268837972771,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9451613949010302,
"min": 0.48188125453327363,
"max": 1.043961114999975,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.009542512685497754,
"min": 3.149713154853647e-05,
"max": 0.013559358486894607,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.13359517759696854,
"min": 0.0002834741839368282,
"max": 0.1898310188165245,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.6244260299857105e-06,
"min": 7.6244260299857105e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010674196441979995,
"min": 0.00010674196441979995,
"max": 0.0025974253341915997,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10254144285714285,
"min": 0.10254144285714285,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4355802,
"min": 1.3886848,
"max": 2.1913343,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026389014142857135,
"min": 0.00026389014142857135,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003694461979999999,
"min": 0.003694461979999999,
"max": 0.08659425915999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.007800370920449495,
"min": 0.007800370920449495,
"max": 0.26026371121406555,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.10920519381761551,
"min": 0.10816393047571182,
"max": 1.8218460083007812,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 635.8085106382979,
"min": 585.9444444444445,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29883.0,
"min": 15984.0,
"max": 32961.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.8959574114452017,
"min": -1.0000000521540642,
"max": 1.1917481272032968,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 42.10999833792448,
"min": -31.99920167028904,
"max": 64.35439886897802,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.8959574114452017,
"min": -1.0000000521540642,
"max": 1.1917481272032968,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 42.10999833792448,
"min": -31.99920167028904,
"max": 64.35439886897802,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.052254310346820314,
"min": 0.05041510572751846,
"max": 6.400271826423705,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.4559525863005547,
"min": 1.7988217025995255,
"max": 102.40434922277927,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1717144661",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1717146603"
},
"total": 1942.1668145609997,
"count": 1,
"self": 0.6300001509998765,
"children": {
"run_training.setup": {
"total": 0.08387950099995578,
"count": 1,
"self": 0.08387950099995578
},
"TrainerController.start_learning": {
"total": 1941.4529349089999,
"count": 1,
"self": 1.2427246619113248,
"children": {
"TrainerController._reset_env": {
"total": 2.66682578200016,
"count": 1,
"self": 2.66682578200016
},
"TrainerController.advance": {
"total": 1937.4197996340886,
"count": 63175,
"self": 1.2777192430612558,
"children": {
"env_step": {
"total": 1320.3516024949963,
"count": 63175,
"self": 1194.2856554618702,
"children": {
"SubprocessEnvManager._take_step": {
"total": 125.32499061105,
"count": 63175,
"self": 4.375731789038127,
"children": {
"TorchPolicy.evaluate": {
"total": 120.94925882201187,
"count": 62553,
"self": 120.94925882201187
}
}
},
"workers": {
"total": 0.740956422076124,
"count": 63175,
"self": 0.0,
"children": {
"worker_root": {
"total": 1937.1857722879927,
"count": 63175,
"is_parallel": true,
"self": 853.6947031099367,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0027836150002258364,
"count": 1,
"is_parallel": true,
"self": 0.0007376380003734084,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002045976999852428,
"count": 8,
"is_parallel": true,
"self": 0.002045976999852428
}
}
},
"UnityEnvironment.step": {
"total": 0.04955032299994855,
"count": 1,
"is_parallel": true,
"self": 0.0006210080000528251,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005180419998396246,
"count": 1,
"is_parallel": true,
"self": 0.0005180419998396246
},
"communicator.exchange": {
"total": 0.04678118599986192,
"count": 1,
"is_parallel": true,
"self": 0.04678118599986192
},
"steps_from_proto": {
"total": 0.0016300870001941803,
"count": 1,
"is_parallel": true,
"self": 0.0003778209998017701,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012522660003924102,
"count": 8,
"is_parallel": true,
"self": 0.0012522660003924102
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1083.491069178056,
"count": 63174,
"is_parallel": true,
"self": 32.33792323498119,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.71441549209203,
"count": 63174,
"is_parallel": true,
"self": 22.71441549209203
},
"communicator.exchange": {
"total": 933.5516532139027,
"count": 63174,
"is_parallel": true,
"self": 933.5516532139027
},
"steps_from_proto": {
"total": 94.88707723708012,
"count": 63174,
"is_parallel": true,
"self": 18.63369781083111,
"children": {
"_process_rank_one_or_two_observation": {
"total": 76.25337942624901,
"count": 505392,
"is_parallel": true,
"self": 76.25337942624901
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 615.790477896031,
"count": 63175,
"self": 2.0201169819956704,
"children": {
"process_trajectory": {
"total": 119.81405286104246,
"count": 63175,
"self": 119.57973079304247,
"children": {
"RLTrainer._checkpoint": {
"total": 0.23432206799998312,
"count": 2,
"self": 0.23432206799998312
}
}
},
"_update_policy": {
"total": 493.9563080529929,
"count": 414,
"self": 291.5795498030616,
"children": {
"TorchPPOOptimizer.update": {
"total": 202.3767582499313,
"count": 22965,
"self": 202.3767582499313
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.4090001059230417e-06,
"count": 1,
"self": 1.4090001059230417e-06
},
"TrainerController._save_models": {
"total": 0.1235834219996832,
"count": 1,
"self": 0.0019075529999099672,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12167586899977323,
"count": 1,
"self": 0.12167586899977323
}
}
}
}
}
}
}
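A minimal sketch of how this file can be inspected, assuming it sits at run_logs/timers.json relative to the working directory. It only uses the keys visible above ("gauges", "total", "count", "children"); nothing else about the repo layout is assumed.

```python
# Sketch: load run_logs/timers.json and summarise the gauges and top-level timers.
import json

with open("run_logs/timers.json") as f:
    timers = json.load(f)

# Each gauge records the last value plus min/max/count for one logged statistic.
for name, gauge in timers["gauges"].items():
    print(f"{name}: last={gauge['value']:.4g} "
          f"min={gauge['min']:.4g} max={gauge['max']:.4g} (n={gauge['count']})")

# The timer tree nests under "children"; print the direct children of the root.
print(f"total wall-clock: {timers['total']:.1f} s")
for name, node in timers["children"].items():
    print(f"  {name}: {node['total']:.1f} s over {node['count']} call(s)")
```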