poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.6701158285140991,
"min": 1.6587228775024414,
"max": 3.29573130607605,
"count": 725
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 30516.357421875,
"min": 16613.8046875,
"max": 178980.65625,
"count": 725
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 49.282828282828284,
"min": 35.595588235294116,
"max": 999.0,
"count": 725
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19516.0,
"min": 12772.0,
"max": 28016.0,
"count": 725
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1557.265374110577,
"min": 1198.5330205252608,
"max": 1588.0020185317587,
"count": 680
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 308338.54407389427,
"min": 2399.1962815952575,
"max": 409448.62126481585,
"count": 680
},
"SoccerTwos.Step.mean": {
"value": 7249992.0,
"min": 9026.0,
"max": 7249992.0,
"count": 725
},
"SoccerTwos.Step.sum": {
"value": 7249992.0,
"min": 9026.0,
"max": 7249992.0,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.026756681501865387,
"min": -0.12756748497486115,
"max": 0.17237921059131622,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 5.297822952270508,
"min": -24.875659942626953,
"max": 30.08611488342285,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.01810976304113865,
"min": -0.1257050484418869,
"max": 0.16898171603679657,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 3.58573317527771,
"min": -24.512483596801758,
"max": 29.562793731689453,
"count": 725
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 725
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.0841818174930534,
"min": -0.6153846153846154,
"max": 0.5067600041627884,
"count": 725
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 16.667999863624573,
"min": -52.45539999008179,
"max": 65.7007999420166,
"count": 725
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.0841818174930534,
"min": -0.6153846153846154,
"max": 0.5067600041627884,
"count": 725
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 16.667999863624573,
"min": -52.45539999008179,
"max": 65.7007999420166,
"count": 725
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 725
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 725
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.018028397635013484,
"min": 0.010461795031248281,
"max": 0.024881355188942203,
"count": 348
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.018028397635013484,
"min": 0.010461795031248281,
"max": 0.024881355188942203,
"count": 348
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10629347438613573,
"min": 1.589391454596504e-07,
"max": 0.12507142946124078,
"count": 348
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10629347438613573,
"min": 1.589391454596504e-07,
"max": 0.12507142946124078,
"count": 348
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10934197281797726,
"min": 1.9186231270396092e-07,
"max": 0.1278058871626854,
"count": 348
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10934197281797726,
"min": 1.9186231270396092e-07,
"max": 0.1278058871626854,
"count": 348
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 348
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 348
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 348
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 348
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 348
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 348
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1727173758",
"python_version": "3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos/SoccerTwos.x86_64 --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.4.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1727190579"
},
"total": 16820.303834783,
"count": 1,
"self": 0.13345043200388318,
"children": {
"run_training.setup": {
"total": 0.051617436000015005,
"count": 1,
"self": 0.051617436000015005
},
"TrainerController.start_learning": {
"total": 16820.118766915,
"count": 1,
"self": 11.377075497650367,
"children": {
"TrainerController._reset_env": {
"total": 4.294128538998052,
"count": 37,
"self": 4.294128538998052
},
"TrainerController.advance": {
"total": 16804.151323784354,
"count": 498753,
"self": 12.082525273013744,
"children": {
"env_step": {
"total": 12949.70809076626,
"count": 498753,
"self": 9456.580573319003,
"children": {
"SubprocessEnvManager._take_step": {
"total": 3486.655585516228,
"count": 498753,
"self": 77.85140495581027,
"children": {
"TorchPolicy.evaluate": {
"total": 3408.8041805604175,
"count": 916844,
"self": 3408.8041805604175
}
}
},
"workers": {
"total": 6.4719319310295305,
"count": 498752,
"self": 0.0,
"children": {
"worker_root": {
"total": 16792.618799273245,
"count": 498752,
"is_parallel": true,
"self": 8834.781746554789,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.003693558000009034,
"count": 2,
"is_parallel": true,
"self": 0.0009371319999900152,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.002756426000019019,
"count": 8,
"is_parallel": true,
"self": 0.002756426000019019
}
}
},
"UnityEnvironment.step": {
"total": 0.03822099900003195,
"count": 1,
"is_parallel": true,
"self": 0.0010993349999353086,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0007964679999759028,
"count": 1,
"is_parallel": true,
"self": 0.0007964679999759028
},
"communicator.exchange": {
"total": 0.0329992660000471,
"count": 1,
"is_parallel": true,
"self": 0.0329992660000471
},
"steps_from_proto": {
"total": 0.0033259300000736403,
"count": 2,
"is_parallel": true,
"self": 0.0005812719999767069,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0027446580000969334,
"count": 8,
"is_parallel": true,
"self": 0.0027446580000969334
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 7957.752285106464,
"count": 498751,
"is_parallel": true,
"self": 488.25413560435663,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 330.8674484480965,
"count": 498751,
"is_parallel": true,
"self": 330.8674484480965
},
"communicator.exchange": {
"total": 5602.609950587758,
"count": 498751,
"is_parallel": true,
"self": 5602.609950587758
},
"steps_from_proto": {
"total": 1536.020750466253,
"count": 997502,
"is_parallel": true,
"self": 259.1321704771001,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1276.888579989153,
"count": 3990008,
"is_parallel": true,
"self": 1276.888579989153
}
}
}
}
},
"steps_from_proto": {
"total": 0.0847676119915377,
"count": 72,
"is_parallel": true,
"self": 0.01620631099444836,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.06856130099708935,
"count": 288,
"is_parallel": true,
"self": 0.06856130099708935
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 3842.3607077450783,
"count": 498752,
"self": 90.9317398571734,
"children": {
"process_trajectory": {
"total": 1775.0596767289287,
"count": 498752,
"self": 1771.9084069749326,
"children": {
"RLTrainer._checkpoint": {
"total": 3.1512697539960755,
"count": 14,
"self": 3.1512697539960755
}
}
},
"_update_policy": {
"total": 1976.369291158976,
"count": 348,
"self": 1168.7769840309502,
"children": {
"TorchPOCAOptimizer.update": {
"total": 807.5923071280258,
"count": 10440,
"self": 807.5923071280258
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.5949990483932197e-06,
"count": 1,
"self": 1.5949990483932197e-06
},
"TrainerController._save_models": {
"total": 0.29623749899838003,
"count": 1,
"self": 0.0029462849997798912,
"children": {
"RLTrainer._checkpoint": {
"total": 0.29329121399860014,
"count": 1,
"self": 0.29329121399860014
}
}
}
}
}
}
}