poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.6468571424484253,
"min": 1.620234727859497,
"max": 3.2958309650421143,
"count": 1628
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 34518.125,
"min": 20440.189453125,
"max": 113663.703125,
"count": 1628
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 129.81578947368422,
"min": 73.53731343283582,
"max": 999.0,
"count": 1628
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19732.0,
"min": 3996.0,
"max": 29968.0,
"count": 1628
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1283.8520896738241,
"min": 1186.7449991089873,
"max": 1292.4892967539745,
"count": 1623
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 97572.75881521063,
"min": 2373.4899982179745,
"max": 168127.30851741313,
"count": 1623
},
"SoccerTwos.Step.mean": {
"value": 16279960.0,
"min": 9678.0,
"max": 16279960.0,
"count": 1628
},
"SoccerTwos.Step.sum": {
"value": 16279960.0,
"min": 9678.0,
"max": 16279960.0,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.0008682604529894888,
"min": -0.1442578285932541,
"max": 0.15217438340187073,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 0.06598779559135437,
"min": -13.073843002319336,
"max": 15.521787643432617,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.004778760951012373,
"min": -0.1425202637910843,
"max": 0.15821237862110138,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.3631858229637146,
"min": -13.25438404083252,
"max": 16.137662887573242,
"count": 1628
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1628
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.05728421242613541,
"min": -0.6153846153846154,
"max": 0.5539310389551623,
"count": 1628
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 4.3536001443862915,
"min": -49.283599853515625,
"max": 37.99500036239624,
"count": 1628
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.05728421242613541,
"min": -0.6153846153846154,
"max": 0.5539310389551623,
"count": 1628
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 4.3536001443862915,
"min": -49.283599853515625,
"max": 37.99500036239624,
"count": 1628
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1628
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1628
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.06345888649711924,
"min": 0.014404559491958934,
"max": 0.06696532642235979,
"count": 492
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.06345888649711924,
"min": 0.014404559491958934,
"max": 0.06696532642235979,
"count": 492
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0351909302175045,
"min": 0.0010329000597266713,
"max": 0.05442078304477036,
"count": 492
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0351909302175045,
"min": 0.0010329000597266713,
"max": 0.05442078304477036,
"count": 492
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.03798762366641313,
"min": 0.0010328145443054381,
"max": 0.0717328670900315,
"count": 492
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.03798762366641313,
"min": 0.0010328145443054381,
"max": 0.0717328670900315,
"count": 492
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003837498200625461,
"min": 0.0003837498200625461,
"max": 0.0003999655760086059,
"count": 492
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003837498200625461,
"min": 0.0003837498200625461,
"max": 0.0003999655760086059,
"count": 492
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.19593745400000004,
"min": 0.19593745400000004,
"max": 0.19999139400000002,
"count": 492
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.19593745400000004,
"min": 0.19593745400000004,
"max": 0.19999139400000002,
"count": 492
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.004797278954599999,
"min": 0.004797278954599999,
"max": 0.0049995705606,
"count": 492
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.004797278954599999,
"min": 0.004797278954599999,
"max": 0.0049995705606,
"count": 492
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1692499201",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/home/victor/.local/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.exe --run-id=SoccerTwosV2 --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1692537155"
},
"total": 37954.460568632,
"count": 1,
"self": 0.0043710970057873055,
"children": {
"run_training.setup": {
"total": 0.050531484997918596,
"count": 1,
"self": 0.050531484997918596
},
"TrainerController.start_learning": {
"total": 37954.40566604999,
"count": 1,
"self": 22.15096920770884,
"children": {
"TrainerController._reset_env": {
"total": 3.2845096030177956,
"count": 9,
"self": 3.2845096030177956
},
"TrainerController.advance": {
"total": 37928.455804075245,
"count": 1069677,
"self": 20.294769567037292,
"children": {
"env_step": {
"total": 25022.445690085602,
"count": 1069677,
"self": 16364.367602574126,
"children": {
"SubprocessEnvManager._take_step": {
"total": 8644.768758229864,
"count": 1069677,
"self": 152.85128560423618,
"children": {
"TorchPolicy.evaluate": {
"total": 8491.917472625628,
"count": 2037238,
"self": 8491.917472625628
}
}
},
"workers": {
"total": 13.309329281612008,
"count": 1069677,
"self": 0.0,
"children": {
"worker_root": {
"total": 37890.355768174464,
"count": 1069677,
"is_parallel": true,
"self": 23835.39968843469,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0022201940009836107,
"count": 2,
"is_parallel": true,
"self": 0.0006396969947672915,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015804970062163193,
"count": 8,
"is_parallel": true,
"self": 0.0015804970062163193
}
}
},
"UnityEnvironment.step": {
"total": 0.022530100999574643,
"count": 1,
"is_parallel": true,
"self": 0.0003971999976783991,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0003426000002946239,
"count": 1,
"is_parallel": true,
"self": 0.0003426000002946239
},
"communicator.exchange": {
"total": 0.020514301002549473,
"count": 1,
"is_parallel": true,
"self": 0.020514301002549473
},
"steps_from_proto": {
"total": 0.0012759999990521464,
"count": 2,
"is_parallel": true,
"self": 0.00028509999901871197,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0009909000000334345,
"count": 8,
"is_parallel": true,
"self": 0.0009909000000334345
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 14054.945545716775,
"count": 1069676,
"is_parallel": true,
"self": 441.5165943916036,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 323.9747311235078,
"count": 1069676,
"is_parallel": true,
"self": 323.9747311235078
},
"communicator.exchange": {
"total": 11894.385197817792,
"count": 1069676,
"is_parallel": true,
"self": 11894.385197817792
},
"steps_from_proto": {
"total": 1395.069022383872,
"count": 2139352,
"is_parallel": true,
"self": 296.86871285547386,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1098.200309528398,
"count": 8557408,
"is_parallel": true,
"self": 1098.200309528398
}
}
}
}
},
"steps_from_proto": {
"total": 0.01053402300021844,
"count": 16,
"is_parallel": true,
"self": 0.0022184619956533425,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.008315561004565097,
"count": 64,
"is_parallel": true,
"self": 0.008315561004565097
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 12885.715344422606,
"count": 1069677,
"self": 156.89693492608785,
"children": {
"process_trajectory": {
"total": 4022.541019220531,
"count": 1069677,
"self": 4013.206508934527,
"children": {
"RLTrainer._checkpoint": {
"total": 9.334510286003933,
"count": 16,
"self": 9.334510286003933
}
}
},
"_update_policy": {
"total": 8706.277390275987,
"count": 493,
"self": 3299.628536370419,
"children": {
"TorchPOCAOptimizer.update": {
"total": 5406.648853905568,
"count": 39440,
"self": 5406.648853905568
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.700012944638729e-06,
"count": 1,
"self": 1.700012944638729e-06
},
"TrainerController._save_models": {
"total": 0.514381464003236,
"count": 1,
"self": 0.009308854991104454,
"children": {
"RLTrainer._checkpoint": {
"total": 0.5050726090121316,
"count": 1,
"self": 0.5050726090121316
}
}
}
}
}
}
}