ppo-Pyramids/run_logs/timers.json
{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.37305524945259094,
"min": 0.37305524945259094,
"max": 1.5489435195922852,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 11317.00390625,
"min": 11317.00390625,
"max": 46988.75,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989952.0,
"min": 29952.0,
"max": 989952.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989952.0,
"min": 29952.0,
"max": 989952.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5687825083732605,
"min": -0.25425416231155396,
"max": 0.6240326166152954,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 157.55274963378906,
"min": -60.258235931396484,
"max": 175.97720336914062,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.007374450098723173,
"min": -0.06463734805583954,
"max": 0.20690801739692688,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -2.042722702026367,
"min": -16.741073608398438,
"max": 49.86483383178711,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06977546833583066,
"min": 0.0633343095375624,
"max": 0.07390616189583114,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9768565567016292,
"min": 0.4902912792257949,
"max": 1.0815248631988652,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015160610663033803,
"min": 0.0011020373078210852,
"max": 0.015160610663033803,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21224854928247325,
"min": 0.01359967492163689,
"max": 0.21224854928247325,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.61101174874286e-06,
"min": 7.61101174874286e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010655416448240004,
"min": 0.00010655416448240004,
"max": 0.0035078204307265994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10253697142857143,
"min": 0.10253697142857143,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4355176,
"min": 1.3886848,
"max": 2.6175899000000005,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002634434457142858,
"min": 0.0002634434457142858,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036882082400000018,
"min": 0.0036882082400000018,
"max": 0.11695041266000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.008062896318733692,
"min": 0.0080373864620924,
"max": 0.3131609857082367,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.11288054287433624,
"min": 0.1125234067440033,
"max": 2.192126989364624,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 330.2631578947368,
"min": 326.20652173913044,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31375.0,
"min": 15984.0,
"max": 34700.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6257808347173193,
"min": -1.0000000521540642,
"max": 1.6703695450787959,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 152.82339846342802,
"min": -28.937001734972,
"max": 153.67399814724922,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6257808347173193,
"min": -1.0000000521540642,
"max": 1.6703695450787959,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 152.82339846342802,
"min": -28.937001734972,
"max": 153.67399814724922,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.027549504300584383,
"min": 0.027549504300584383,
"max": 5.990333755500615,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.589653404254932,
"min": 2.3761983209988102,
"max": 95.84534008800983,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1688361345",
"python_version": "3.10.12 (main, Jun 7 2023, 12:45:35) [GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1688363512"
},
"total": 2166.606332537,
"count": 1,
"self": 0.47462073099995905,
"children": {
"run_training.setup": {
"total": 0.038272824000159744,
"count": 1,
"self": 0.038272824000159744
},
"TrainerController.start_learning": {
"total": 2166.093438982,
"count": 1,
"self": 1.2669112710213994,
"children": {
"TrainerController._reset_env": {
"total": 4.275054532000013,
"count": 1,
"self": 4.275054532000013
},
"TrainerController.advance": {
"total": 2160.4508361169787,
"count": 64128,
"self": 1.2628299109496766,
"children": {
"env_step": {
"total": 1547.456354456043,
"count": 64128,
"self": 1442.5580124451587,
"children": {
"SubprocessEnvManager._take_step": {
"total": 104.16146118692996,
"count": 64128,
"self": 4.479133668941358,
"children": {
"TorchPolicy.evaluate": {
"total": 99.6823275179886,
"count": 62566,
"self": 99.6823275179886
}
}
},
"workers": {
"total": 0.7368808239543796,
"count": 64128,
"self": 0.0,
"children": {
"worker_root": {
"total": 2161.540317221974,
"count": 64128,
"is_parallel": true,
"self": 825.0584730829705,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019061569998939376,
"count": 1,
"is_parallel": true,
"self": 0.0005582949995641684,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013478620003297692,
"count": 8,
"is_parallel": true,
"self": 0.0013478620003297692
}
}
},
"UnityEnvironment.step": {
"total": 0.08825939000007565,
"count": 1,
"is_parallel": true,
"self": 0.0005601300001671916,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004624030000286439,
"count": 1,
"is_parallel": true,
"self": 0.0004624030000286439
},
"communicator.exchange": {
"total": 0.08548500999995667,
"count": 1,
"is_parallel": true,
"self": 0.08548500999995667
},
"steps_from_proto": {
"total": 0.0017518469999231456,
"count": 1,
"is_parallel": true,
"self": 0.00034567200009405497,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014061749998290907,
"count": 8,
"is_parallel": true,
"self": 0.0014061749998290907
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1336.4818441390034,
"count": 64127,
"is_parallel": true,
"self": 32.06007387007776,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 21.923908174949247,
"count": 64127,
"is_parallel": true,
"self": 21.923908174949247
},
"communicator.exchange": {
"total": 1184.9648444419197,
"count": 64127,
"is_parallel": true,
"self": 1184.9648444419197
},
"steps_from_proto": {
"total": 97.53301765205674,
"count": 64127,
"is_parallel": true,
"self": 18.637282830100048,
"children": {
"_process_rank_one_or_two_observation": {
"total": 78.8957348219567,
"count": 513016,
"is_parallel": true,
"self": 78.8957348219567
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 611.7316517499862,
"count": 64128,
"self": 2.466526309999608,
"children": {
"process_trajectory": {
"total": 106.12636464798584,
"count": 64128,
"self": 105.8734203219858,
"children": {
"RLTrainer._checkpoint": {
"total": 0.25294432600003347,
"count": 2,
"self": 0.25294432600003347
}
}
},
"_update_policy": {
"total": 503.13876079200077,
"count": 453,
"self": 317.9802672890178,
"children": {
"TorchPPOOptimizer.update": {
"total": 185.158493502983,
"count": 22782,
"self": 185.158493502983
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0680000741558615e-06,
"count": 1,
"self": 1.0680000741558615e-06
},
"TrainerController._save_models": {
"total": 0.100635993999731,
"count": 1,
"self": 0.00140892299987172,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09922707099985928,
"count": 1,
"self": 0.09922707099985928
}
}
}
}
}
}
}