{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.4514496326446533,
"min": 1.3197808265686035,
"max": 3.295729637145996,
"count": 4605
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 28982.546875,
"min": 20885.421875,
"max": 124645.5078125,
"count": 4605
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 65.19736842105263,
"min": 39.08943089430894,
"max": 999.0,
"count": 4605
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19820.0,
"min": 16140.0,
"max": 26032.0,
"count": 4605
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1902.078289632654,
"min": 1197.7343049521035,
"max": 1937.2127213221727,
"count": 4554
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 289115.9000241634,
"min": 2395.8239121441106,
"max": 429609.87397075375,
"count": 4554
},
"SoccerTwos.Step.mean": {
"value": 46049987.0,
"min": 9970.0,
"max": 46049987.0,
"count": 4605
},
"SoccerTwos.Step.sum": {
"value": 46049987.0,
"min": 9970.0,
"max": 46049987.0,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.027679337188601494,
"min": -0.1588854342699051,
"max": 0.2037215530872345,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -4.207259178161621,
"min": -31.618200302124023,
"max": 28.289274215698242,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.027536731213331223,
"min": -0.15909075736999512,
"max": 0.19906234741210938,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -4.185583114624023,
"min": -31.659059524536133,
"max": 28.695724487304688,
"count": 4605
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 4605
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.05736578961736277,
"min": -0.7425999989112219,
"max": 0.5727542877197266,
"count": 4605
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 8.719600021839142,
"min": -66.73919999599457,
"max": 70.90119957923889,
"count": 4605
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.05736578961736277,
"min": -0.7425999989112219,
"max": 0.5727542877197266,
"count": 4605
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 8.719600021839142,
"min": -66.73919999599457,
"max": 70.90119957923889,
"count": 4605
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 4605
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 4605
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.016332187781032795,
"min": 0.009722032710851636,
"max": 0.02553258594125509,
"count": 2230
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.016332187781032795,
"min": 0.009722032710851636,
"max": 0.02553258594125509,
"count": 2230
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09716136579712233,
"min": 2.0628411865194113e-06,
"max": 0.12346688583493233,
"count": 2230
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09716136579712233,
"min": 2.0628411865194113e-06,
"max": 0.12346688583493233,
"count": 2230
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09787180299560229,
"min": 2.1913840024960035e-06,
"max": 0.12617241218686104,
"count": 2230
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09787180299560229,
"min": 2.1913840024960035e-06,
"max": 0.12617241218686104,
"count": 2230
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2230
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2230
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2230
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2230
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2230
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2230
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1709930517",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\TE576752\\AppData\\Local\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1710077327"
},
"total": 146810.34652289998,
"count": 1,
"self": 0.2515708999999333,
"children": {
"run_training.setup": {
"total": 0.3718903999997565,
"count": 1,
"self": 0.3718903999997565
},
"TrainerController.start_learning": {
"total": 146809.72306159997,
"count": 1,
"self": 87.84930279676337,
"children": {
"TrainerController._reset_env": {
"total": 11.484286600094492,
"count": 231,
"self": 11.484286600094492
},
"TrainerController.advance": {
"total": 146710.19641710314,
"count": 3164035,
"self": 91.90805912215728,
"children": {
"env_step": {
"total": 61425.1935253849,
"count": 3164035,
"self": 47962.29276300903,
"children": {
"SubprocessEnvManager._take_step": {
"total": 13406.549605785065,
"count": 3164035,
"self": 475.0261422809417,
"children": {
"TorchPolicy.evaluate": {
"total": 12931.523463504123,
"count": 5788492,
"self": 12931.523463504123
}
}
},
"workers": {
"total": 56.351156590811115,
"count": 3164035,
"self": 0.0,
"children": {
"worker_root": {
"total": 146693.84032621628,
"count": 3164035,
"is_parallel": true,
"self": 108561.78913623143,
"children": {
"steps_from_proto": {
"total": 0.4293091999243188,
"count": 462,
"is_parallel": true,
"self": 0.08332269923903368,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.34598650068528514,
"count": 1848,
"is_parallel": true,
"self": 0.34598650068528514
}
}
},
"UnityEnvironment.step": {
"total": 38131.62188078492,
"count": 3164034,
"is_parallel": true,
"self": 2103.0905682064404,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 1658.51511100885,
"count": 3164034,
"is_parallel": true,
"self": 1658.51511100885
},
"communicator.exchange": {
"total": 28200.717900798067,
"count": 3164034,
"is_parallel": true,
"self": 28200.717900798067
},
"steps_from_proto": {
"total": 6169.298300771569,
"count": 6328068,
"is_parallel": true,
"self": 1199.5712652587526,
"children": {
"_process_rank_one_or_two_observation": {
"total": 4969.727035512817,
"count": 25312272,
"is_parallel": true,
"self": 4969.727035512817
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 85193.09483259609,
"count": 3164034,
"self": 629.4849681738706,
"children": {
"process_trajectory": {
"total": 13955.542832022082,
"count": 3164034,
"self": 13941.54434032202,
"children": {
"RLTrainer._checkpoint": {
"total": 13.998491700062914,
"count": 92,
"self": 13.998491700062914
}
}
},
"_update_policy": {
"total": 70608.06703240014,
"count": 2230,
"self": 6778.810363801313,
"children": {
"TorchPOCAOptimizer.update": {
"total": 63829.25666859883,
"count": 66900,
"self": 63829.25666859883
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.4999823179095984e-06,
"count": 1,
"self": 1.4999823179095984e-06
},
"TrainerController._save_models": {
"total": 0.19305359999998473,
"count": 1,
"self": 0.006676999997580424,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1863766000024043,
"count": 1,
"self": 0.1863766000024043
}
}
}
}
}
}
}