poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.6160085201263428,
"min": 2.5309042930603027,
"max": 3.2958028316497803,
"count": 1200
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 28127.32421875,
"min": 1142.884033203125,
"max": 103250.796875,
"count": 1200
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 140.41176470588235,
"min": 71.33333333333333,
"max": 999.0,
"count": 1200
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 9548.0,
"min": 3996.0,
"max": 15984.0,
"count": 1200
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1293.0965716918265,
"min": 1181.2921910083294,
"max": 1299.4242601266078,
"count": 924
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 43965.2834375221,
"min": 2365.186011911509,
"max": 85501.7490598387,
"count": 924
},
"SoccerTwos.Step.mean": {
"value": 5999994.0,
"min": 4064.0,
"max": 5999994.0,
"count": 1200
},
"SoccerTwos.Step.sum": {
"value": 5999994.0,
"min": 4064.0,
"max": 5999994.0,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.0478287935256958,
"min": -0.07056863605976105,
"max": 0.0990050733089447,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 1.5783501863479614,
"min": -3.28068470954895,
"max": 3.465177536010742,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.047932833433151245,
"min": -0.07096714526414871,
"max": 0.10634422302246094,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 1.5817835330963135,
"min": -3.3354556560516357,
"max": 3.722047805786133,
"count": 1200
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1200
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.3155103104584145,
"min": -0.6666666766007742,
"max": 0.5640228674525306,
"count": 1200
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 10.411840245127678,
"min": -20.109760403633118,
"max": 13.496320486068726,
"count": 1200
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.3943878809611003,
"min": -0.8333333333333334,
"max": 0.7050285736719767,
"count": 1200
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 13.014800071716309,
"min": -25.137200117111206,
"max": 16.870399951934814,
"count": 1200
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1200
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1200
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.023117861443461153,
"min": 0.021430131281699797,
"max": 0.02991539443033483,
"count": 143
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.023117861443461153,
"min": 0.021430131281699797,
"max": 0.02991539443033483,
"count": 143
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.018350337455049156,
"min": 0.00023209207139643695,
"max": 0.30159230546389776,
"count": 143
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.018350337455049156,
"min": 0.00023209207139643695,
"max": 0.30159230546389776,
"count": 143
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.01843568489421159,
"min": 0.0002673177294468587,
"max": 0.04010890867055689,
"count": 143
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.01843568489421159,
"min": 0.0002673177294468587,
"max": 0.04010890867055689,
"count": 143
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 2.2414328850666505e-06,
"min": 2.2414328850666505e-06,
"max": 0.0004963543340624668,
"count": 143
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 2.2414328850666505e-06,
"min": 2.2414328850666505e-06,
"max": 0.0004963543340624668,
"count": 143
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.1,
"min": 0.1,
"max": 0.1,
"count": 143
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.1,
"min": 0.1,
"max": 0.1,
"count": 143
},
"SoccerTwos.Policy.Beta.mean": {
"value": 5.47818399999997e-05,
"min": 5.47818399999997e-05,
"max": 0.009927159579999997,
"count": 143
},
"SoccerTwos.Policy.Beta.sum": {
"value": 5.47818399999997e-05,
"min": 5.47818399999997e-05,
"max": 0.009927159579999997,
"count": 143
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1697705386",
"python_version": "3.10.10 | packaged by Anaconda, Inc. | (main, Mar 21 2023, 18:39:17) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\uzi\\.conda\\envs\\mlagents\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.0+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1697744881"
},
"total": 39494.87471559999,
"count": 1,
"self": 1.6276554999640211,
"children": {
"run_training.setup": {
"total": 0.06667810003273189,
"count": 1,
"self": 0.06667810003273189
},
"TrainerController.start_learning": {
"total": 39493.18038199999,
"count": 1,
"self": 5.84308449953096,
"children": {
"TrainerController._reset_env": {
"total": 7.211642399954144,
"count": 60,
"self": 7.211642399954144
},
"TrainerController.advance": {
"total": 39479.90466690052,
"count": 402382,
"self": 6.35884719598107,
"children": {
"env_step": {
"total": 5687.911550901015,
"count": 402382,
"self": 3965.454874078918,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1718.4236765305395,
"count": 402382,
"self": 36.77869198168628,
"children": {
"TorchPolicy.evaluate": {
"total": 1681.6449845488532,
"count": 793854,
"self": 1681.6449845488532
}
}
},
"workers": {
"total": 4.033000291557983,
"count": 402382,
"self": 0.0,
"children": {
"worker_root": {
"total": 39478.93575890345,
"count": 402382,
"is_parallel": true,
"self": 36286.98941390769,
"children": {
"steps_from_proto": {
"total": 0.06628599960822612,
"count": 120,
"is_parallel": true,
"self": 0.015201199566945434,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.05108480004128069,
"count": 480,
"is_parallel": true,
"self": 0.05108480004128069
}
}
},
"UnityEnvironment.step": {
"total": 3191.8800589961465,
"count": 402382,
"is_parallel": true,
"self": 126.63717696699314,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 108.40029989677714,
"count": 402382,
"is_parallel": true,
"self": 108.40029989677714
},
"communicator.exchange": {
"total": 2530.9522374179214,
"count": 402382,
"is_parallel": true,
"self": 2530.9522374179214
},
"steps_from_proto": {
"total": 425.89034471445484,
"count": 804764,
"is_parallel": true,
"self": 92.46972756035393,
"children": {
"_process_rank_one_or_two_observation": {
"total": 333.4206171541009,
"count": 3219056,
"is_parallel": true,
"self": 333.4206171541009
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 33785.63426880352,
"count": 402382,
"self": 48.6818082081154,
"children": {
"process_trajectory": {
"total": 1255.7951576953637,
"count": 402382,
"self": 1253.0429551952984,
"children": {
"RLTrainer._checkpoint": {
"total": 2.752202500065323,
"count": 12,
"self": 2.752202500065323
}
}
},
"_update_policy": {
"total": 32481.157302900043,
"count": 143,
"self": 942.4680457977229,
"children": {
"TorchPOCAOptimizer.update": {
"total": 31538.68925710232,
"count": 28860,
"self": 31538.68925710232
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.00006091594696e-07,
"count": 1,
"self": 8.00006091594696e-07
},
"TrainerController._save_models": {
"total": 0.22098739998182282,
"count": 1,
"self": 0.007894299982581288,
"children": {
"RLTrainer._checkpoint": {
"total": 0.21309309999924153,
"count": 1,
"self": 0.21309309999924153
}
}
}
}
}
}
}
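
A minimal sketch (not part of the original file) of how one might load this timers.json and summarize it, assuming the JSON above is saved locally as "timers.json". The key names ("gauges", "value", "min", "max", "count", "total", "children") are taken directly from the file contents; no other mlagents API is assumed.

import json

def walk_timers(node, name="root", depth=0, min_seconds=1.0):
    """Recursively print timer nodes whose total wall-clock time exceeds min_seconds."""
    total = node.get("total", 0.0)
    if total >= min_seconds:
        print(f"{'  ' * depth}{name}: {total:,.1f} s ({node.get('count', 0)} calls)")
    for child_name, child in node.get("children", {}).items():
        walk_timers(child, child_name, depth + 1, min_seconds)

if __name__ == "__main__":
    with open("timers.json") as f:  # hypothetical local path for this file
        data = json.load(f)

    # Each gauge records the latest value plus min/max/count for one training statistic.
    for key, gauge in data["gauges"].items():
        print(f"{key}: value={gauge['value']:.4f} "
              f"(min={gauge['min']:.4f}, max={gauge['max']:.4f}, n={gauge['count']})")

    # The nested timer tree shows where wall-clock time went during the run,
    # e.g. env_step vs. trainer_advance under TrainerController.advance.
    walk_timers(data)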