poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.347036600112915,
"min": 1.2690355777740479,
"max": 3.295771598815918,
"count": 5000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 27889.046875,
"min": 15704.1669921875,
"max": 154574.09375,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 58.576470588235296,
"min": 40.48360655737705,
"max": 999.0,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19916.0,
"min": 16232.0,
"max": 25176.0,
"count": 5000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1682.3302431070465,
"min": 1180.0864163496158,
"max": 1769.0053013926174,
"count": 4805
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 285996.1413281979,
"min": 2361.1670866252966,
"max": 402097.11283217557,
"count": 4805
},
"SoccerTwos.Step.mean": {
"value": 49999904.0,
"min": 9606.0,
"max": 49999904.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 49999904.0,
"min": 9606.0,
"max": 49999904.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.03184669837355614,
"min": -0.14862856268882751,
"max": 0.17995670437812805,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -5.382091999053955,
"min": -24.523712158203125,
"max": 24.188234329223633,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.032041966915130615,
"min": -0.15108972787857056,
"max": 0.18417219817638397,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -5.415092468261719,
"min": -24.94078826904297,
"max": 25.135684967041016,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.09725562178876979,
"min": -0.5714285714285714,
"max": 0.5946863632310521,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -16.436200082302094,
"min": -69.38199985027313,
"max": 67.41999995708466,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.09725562178876979,
"min": -0.5714285714285714,
"max": 0.5946863632310521,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -16.436200082302094,
"min": -69.38199985027313,
"max": 67.41999995708466,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.016061531269224362,
"min": 0.009867382691785073,
"max": 0.025753002444980667,
"count": 2417
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.016061531269224362,
"min": 0.009867382691785073,
"max": 0.025753002444980667,
"count": 2417
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0999513973792394,
"min": 4.8254725015794973e-08,
"max": 0.12561291108528774,
"count": 2417
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0999513973792394,
"min": 4.8254725015794973e-08,
"max": 0.12561291108528774,
"count": 2417
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10045713807145755,
"min": 5.4563187982618425e-08,
"max": 0.12718206718564035,
"count": 2417
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10045713807145755,
"min": 5.4563187982618425e-08,
"max": 0.12718206718564035,
"count": 2417
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2417
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2417
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2417
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 2417
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2417
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2417
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1687354224",
"python_version": "3.9.16 (main, May 15 2023, 23:46:34) \n[GCC 11.2.0]",
"command_line_arguments": "/home/kejia/01_work/conda/envs/rl/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1687431977"
},
"total": 77752.132843518,
"count": 1,
"self": 0.2706062859942904,
"children": {
"run_training.setup": {
"total": 0.012899744000151259,
"count": 1,
"self": 0.012899744000151259
},
"TrainerController.start_learning": {
"total": 77751.849337488,
"count": 1,
"self": 63.739841922099004,
"children": {
"TrainerController._reset_env": {
"total": 8.851859242075534,
"count": 250,
"self": 8.851859242075534
},
"TrainerController.advance": {
"total": 77678.95592143483,
"count": 3433199,
"self": 65.65787351402105,
"children": {
"env_step": {
"total": 57810.50003577653,
"count": 3433199,
"self": 42167.6887678804,
"children": {
"SubprocessEnvManager._take_step": {
"total": 15604.982782657968,
"count": 3433199,
"self": 429.27984591477434,
"children": {
"TorchPolicy.evaluate": {
"total": 15175.702936743193,
"count": 6292228,
"self": 15175.702936743193
}
}
},
"workers": {
"total": 37.828485238168014,
"count": 3433199,
"self": 0.0,
"children": {
"worker_root": {
"total": 77611.41751427937,
"count": 3433199,
"is_parallel": true,
"self": 43000.622320891256,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0031274900002244976,
"count": 2,
"is_parallel": true,
"self": 0.0007344740006374195,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002393015999587078,
"count": 8,
"is_parallel": true,
"self": 0.002393015999587078
}
}
},
"UnityEnvironment.step": {
"total": 0.027826115999687318,
"count": 1,
"is_parallel": true,
"self": 0.0006064789999982168,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004882819998783816,
"count": 1,
"is_parallel": true,
"self": 0.0004882819998783816
},
"communicator.exchange": {
"total": 0.0235298679999687,
"count": 1,
"is_parallel": true,
"self": 0.0235298679999687
},
"steps_from_proto": {
"total": 0.003201486999842018,
"count": 2,
"is_parallel": true,
"self": 0.0011092610002378933,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002092225999604125,
"count": 8,
"is_parallel": true,
"self": 0.002092225999604125
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 34610.34975866321,
"count": 3433198,
"is_parallel": true,
"self": 1804.5499278566276,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 1190.446759735783,
"count": 3433198,
"is_parallel": true,
"self": 1190.446759735783
},
"communicator.exchange": {
"total": 26051.2008201062,
"count": 3433198,
"is_parallel": true,
"self": 26051.2008201062
},
"steps_from_proto": {
"total": 5564.1522509645965,
"count": 6866396,
"is_parallel": true,
"self": 1192.1637758689494,
"children": {
"_process_rank_one_or_two_observation": {
"total": 4371.988475095647,
"count": 27465584,
"is_parallel": true,
"self": 4371.988475095647
}
}
}
}
},
"steps_from_proto": {
"total": 0.44543472490204294,
"count": 498,
"is_parallel": true,
"self": 0.09592068305028079,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.34951404185176216,
"count": 1992,
"is_parallel": true,
"self": 0.34951404185176216
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 19802.798012144267,
"count": 3433199,
"self": 440.0194110431803,
"children": {
"process_trajectory": {
"total": 8660.09415681116,
"count": 3433199,
"self": 8632.725306058106,
"children": {
"RLTrainer._checkpoint": {
"total": 27.36885075305372,
"count": 100,
"self": 27.36885075305372
}
}
},
"_update_policy": {
"total": 10702.684444289927,
"count": 2417,
"self": 7641.828680424076,
"children": {
"TorchPOCAOptimizer.update": {
"total": 3060.855763865851,
"count": 72510,
"self": 3060.855763865851
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.00000761449337e-06,
"count": 1,
"self": 1.00000761449337e-06
},
"TrainerController._save_models": {
"total": 0.3017138889990747,
"count": 1,
"self": 0.0015715390036348253,
"children": {
"RLTrainer._checkpoint": {
"total": 0.30014234999543987,
"count": 1,
"self": 0.30014234999543987
}
}
}
}
}
}
}
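
Below is a minimal sketch (not part of the original timers.json) of how one might load this file and inspect it with Python: it prints each recorded gauge and then walks the nested timer tree to show where wall-clock time went. The relative path `run_logs/timers.json` and the 1-second reporting threshold are assumptions for illustration.

```python
import json

def print_timer_tree(name, node, depth=0, min_seconds=1.0):
    """Recursively print timer nodes whose total time exceeds min_seconds."""
    total = node.get("total", 0.0)
    if total < min_seconds:
        return
    print(f"{'  ' * depth}{name}: {total:,.1f}s over {node.get('count', 0):,} call(s)")
    for child_name, child in node.get("children", {}).items():
        print_timer_tree(child_name, child, depth + 1, min_seconds)

with open("run_logs/timers.json") as f:  # assumed path to this file
    timers = json.load(f)

# Each gauge stores the latest value plus the min/max/count seen during training.
for gauge_name, gauge in timers["gauges"].items():
    print(f"{gauge_name}: value={gauge['value']:.4f} "
          f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, count={gauge['count']})")

# The remaining top-level keys form a nested timer tree rooted at "root".
print_timer_tree(timers.get("name", "root"), timers)
```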