{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.5208332538604736,
"min": 1.273158311843872,
"max": 1.8148019313812256,
"count": 1678
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 29832.6640625,
"min": 20223.357421875,
"max": 40978.0546875,
"count": 1678
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 63.51315789473684,
"min": 43.981651376146786,
"max": 136.02702702702703,
"count": 1678
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19308.0,
"min": 10900.0,
"max": 21472.0,
"count": 1678
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1586.8200921264854,
"min": 1542.8708725938334,
"max": 1668.9721804534734,
"count": 1678
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 241196.6540032258,
"min": 117899.30230656838,
"max": 365068.93665123876,
"count": 1678
},
"SoccerTwos.Step.mean": {
"value": 26719946.0,
"min": 9949994.0,
"max": 26719946.0,
"count": 1678
},
"SoccerTwos.Step.sum": {
"value": 26719946.0,
"min": 9949994.0,
"max": 26719946.0,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.06350585073232651,
"min": -0.14680367708206177,
"max": 0.0801720917224884,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -9.652889251708984,
"min": -27.305482864379883,
"max": 14.987405776977539,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.06474441289901733,
"min": -0.14830423891544342,
"max": 0.08140476793050766,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -9.841150283813477,
"min": -27.58458709716797,
"max": 15.016227722167969,
"count": 1678
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1678
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.1389289466958297,
"min": -0.4145650787959023,
"max": 0.3313132074643981,
"count": 1678
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -21.117199897766113,
"min": -63.71439999341965,
"max": 51.0037996172905,
"count": 1678
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.1389289466958297,
"min": -0.4145650787959023,
"max": 0.3313132074643981,
"count": 1678
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -21.117199897766113,
"min": -63.71439999341965,
"max": 51.0037996172905,
"count": 1678
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1678
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1678
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01615842728157683,
"min": 0.009192483994168773,
"max": 0.026691062779476245,
"count": 813
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01615842728157683,
"min": 0.009192483994168773,
"max": 0.026691062779476245,
"count": 813
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09005015144745508,
"min": 0.06957109409073989,
"max": 0.12221147641539573,
"count": 813
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09005015144745508,
"min": 0.06957109409073989,
"max": 0.12221147641539573,
"count": 813
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.0914334607621034,
"min": 0.07065122574567795,
"max": 0.12482641264796257,
"count": 813
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.0914334607621034,
"min": 0.07065122574567795,
"max": 0.12482641264796257,
"count": 813
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 813
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 813
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 813
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 813
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 813
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 813
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1691280965",
"python_version": "3.9.9 (main, Dec 22 2021, 19:41:22) \n[GCC 10.3.0]",
"command_line_arguments": "/home/aphi/.pyenv/versions/3.9.9/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1691317518"
},
"total": 36553.451603412,
"count": 1,
"self": 0.07956845599983353,
"children": {
"run_training.setup": {
"total": 0.05672447699907934,
"count": 1,
"self": 0.05672447699907934
},
"TrainerController.start_learning": {
"total": 36553.315310479,
"count": 1,
"self": 25.90310476511513,
"children": {
"TrainerController._reset_env": {
"total": 2.8752524210576667,
"count": 85,
"self": 2.8752524210576667
},
"TrainerController.advance": {
"total": 36524.34532972085,
"count": 1154518,
"self": 27.165245878706628,
"children": {
"env_step": {
"total": 18919.483706302977,
"count": 1154518,
"self": 16086.282047817207,
"children": {
"SubprocessEnvManager._take_step": {
"total": 2817.332583169118,
"count": 1154518,
"self": 122.10931460303254,
"children": {
"TorchPolicy.evaluate": {
"total": 2695.2232685660856,
"count": 2105992,
"self": 2695.2232685660856
}
}
},
"workers": {
"total": 15.869075316652015,
"count": 1154517,
"self": 0.0,
"children": {
"worker_root": {
"total": 36513.4823779097,
"count": 1154517,
"is_parallel": true,
"self": 23220.272478314437,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001969711003766861,
"count": 2,
"is_parallel": true,
"self": 0.000470619008410722,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014990919953561388,
"count": 8,
"is_parallel": true,
"self": 0.0014990919953561388
}
}
},
"UnityEnvironment.step": {
"total": 0.03214666799613042,
"count": 1,
"is_parallel": true,
"self": 0.0006364009896060452,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0007950849976623431,
"count": 1,
"is_parallel": true,
"self": 0.0007950849976623431
},
"communicator.exchange": {
"total": 0.027651450000121258,
"count": 1,
"is_parallel": true,
"self": 0.027651450000121258
},
"steps_from_proto": {
"total": 0.0030637320087407716,
"count": 2,
"is_parallel": true,
"self": 0.0007643250064575113,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0022994070022832602,
"count": 8,
"is_parallel": true,
"self": 0.0022994070022832602
}
}
}
}
}
}
},
"steps_from_proto": {
"total": 0.1485878519670223,
"count": 168,
"is_parallel": true,
"self": 0.029665567053598352,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.11892228491342394,
"count": 672,
"is_parallel": true,
"self": 0.11892228491342394
}
}
},
"UnityEnvironment.step": {
"total": 13293.061311743295,
"count": 1154516,
"is_parallel": true,
"self": 792.2785328326936,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 670.7080909666547,
"count": 1154516,
"is_parallel": true,
"self": 670.7080909666547
},
"communicator.exchange": {
"total": 9608.185685382246,
"count": 1154516,
"is_parallel": true,
"self": 9608.185685382246
},
"steps_from_proto": {
"total": 2221.8890025617,
"count": 2309032,
"is_parallel": true,
"self": 437.1007224857385,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1784.7882800759617,
"count": 9236128,
"is_parallel": true,
"self": 1784.7882800759617
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 17577.696377539163,
"count": 1154517,
"self": 181.50837569407304,
"children": {
"process_trajectory": {
"total": 3242.301654148134,
"count": 1154517,
"self": 3235.886169627105,
"children": {
"RLTrainer._checkpoint": {
"total": 6.415484521028702,
"count": 34,
"self": 6.415484521028702
}
}
},
"_update_policy": {
"total": 14153.886347696956,
"count": 814,
"self": 2519.5350972263477,
"children": {
"TorchPOCAOptimizer.update": {
"total": 11634.351250470609,
"count": 24420,
"self": 11634.351250470609
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.581996912136674e-06,
"count": 1,
"self": 1.581996912136674e-06
},
"TrainerController._save_models": {
"total": 0.19162198998674285,
"count": 1,
"self": 0.0018692279845708981,
"children": {
"RLTrainer._checkpoint": {
"total": 0.18975276200217195,
"count": 1,
"self": 0.18975276200217195
}
}
}
}
}
}
}