{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.050253391265869,
"min": 2.034529685974121,
"max": 3.227118968963623,
"count": 411
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 41661.1484375,
"min": 19230.033203125,
"max": 131912.359375,
"count": 411
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 50.864583333333336,
"min": 40.55833333333333,
"max": 999.0,
"count": 411
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19532.0,
"min": 7992.0,
"max": 26396.0,
"count": 411
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1619.3064656883705,
"min": 1206.3464324066229,
"max": 1622.4247989601322,
"count": 386
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 310906.8414121671,
"min": 2412.6928648132457,
"max": 376982.44280891324,
"count": 386
},
"SoccerTwos.Step.mean": {
"value": 4619976.0,
"min": 519681.0,
"max": 4619976.0,
"count": 411
},
"SoccerTwos.Step.sum": {
"value": 4619976.0,
"min": 519681.0,
"max": 4619976.0,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.05835111811757088,
"min": -0.08314729481935501,
"max": 0.19273114204406738,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 11.203414916992188,
"min": -13.63615608215332,
"max": 30.840782165527344,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.0547499842941761,
"min": -0.09096977114677429,
"max": 0.19480246305465698,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 10.51199722290039,
"min": -14.919042587280273,
"max": 30.88077163696289,
"count": 411
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 411
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.0062666647136211395,
"min": -0.7777777777777778,
"max": 0.42301250124971074,
"count": 411
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -1.2031996250152588,
"min": -78.96480000019073,
"max": 64.0497996211052,
"count": 411
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.0062666647136211395,
"min": -0.7777777777777778,
"max": 0.42301250124971074,
"count": 411
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -1.2031996250152588,
"min": -78.96480000019073,
"max": 64.0497996211052,
"count": 411
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 411
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 411
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015313063388263496,
"min": 0.011214571007682632,
"max": 0.023672397252327452,
"count": 195
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015313063388263496,
"min": 0.011214571007682632,
"max": 0.023672397252327452,
"count": 195
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10252293199300766,
"min": 4.140989418980704e-06,
"max": 0.11862916002670924,
"count": 195
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10252293199300766,
"min": 4.140989418980704e-06,
"max": 0.11862916002670924,
"count": 195
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10426674634218216,
"min": 4.45966087833464e-06,
"max": 0.121265010535717,
"count": 195
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10426674634218216,
"min": 4.45966087833464e-06,
"max": 0.121265010535717,
"count": 195
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 195
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 195
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 195
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 195
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 195
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 195
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1690710715",
"python_version": "3.9.17 (main, Jul 5 2023, 20:47:11) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\praty\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.1+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1690730866"
},
"total": 20151.2319752,
"count": 1,
"self": 1.5930857000021206,
"children": {
"run_training.setup": {
"total": 0.19156610000000018,
"count": 1,
"self": 0.19156610000000018
},
"TrainerController.start_learning": {
"total": 20149.4473234,
"count": 1,
"self": 11.337723500910215,
"children": {
"TrainerController._reset_env": {
"total": 7.253884600004711,
"count": 22,
"self": 7.253884600004711
},
"TrainerController.advance": {
"total": 20130.55692599908,
"count": 275875,
"self": 11.953038298328465,
"children": {
"env_step": {
"total": 8576.67303360019,
"count": 275875,
"self": 6783.355670400021,
"children": {
"SubprocessEnvManager._take_step": {
"total": 1786.1429430003095,
"count": 275875,
"self": 69.90850140024281,
"children": {
"TorchPolicy.evaluate": {
"total": 1716.2344416000667,
"count": 522378,
"self": 1716.2344416000667
}
}
},
"workers": {
"total": 7.1744201998584725,
"count": 275874,
"self": 0.0,
"children": {
"worker_root": {
"total": 20126.62331700034,
"count": 275874,
"is_parallel": true,
"self": 14632.633021800773,
"children": {
"steps_from_proto": {
"total": 0.06739730000300259,
"count": 44,
"is_parallel": true,
"self": 0.013614499999679452,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.05378280000332314,
"count": 176,
"is_parallel": true,
"self": 0.05378280000332314
}
}
},
"UnityEnvironment.step": {
"total": 5493.922897899563,
"count": 275874,
"is_parallel": true,
"self": 283.60887699985415,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 202.12518460061193,
"count": 275874,
"is_parallel": true,
"self": 202.12518460061193
},
"communicator.exchange": {
"total": 4088.7022474993046,
"count": 275874,
"is_parallel": true,
"self": 4088.7022474993046
},
"steps_from_proto": {
"total": 919.4865887997933,
"count": 551748,
"is_parallel": true,
"self": 182.57454560020005,
"children": {
"_process_rank_one_or_two_observation": {
"total": 736.9120431995932,
"count": 2206992,
"is_parallel": true,
"self": 736.9120431995932
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 11541.930854100561,
"count": 275874,
"self": 74.77104820042223,
"children": {
"process_trajectory": {
"total": 1665.2003485001283,
"count": 275874,
"self": 1663.0265831001298,
"children": {
"RLTrainer._checkpoint": {
"total": 2.1737653999985014,
"count": 8,
"self": 2.1737653999985014
}
}
},
"_update_policy": {
"total": 9801.959457400011,
"count": 196,
"self": 917.6712407999676,
"children": {
"TorchPOCAOptimizer.update": {
"total": 8884.288216600044,
"count": 5880,
"self": 8884.288216600044
}
}
}
}
}
}
},
"trainer_threads": {
"total": 3.4000004234258085e-06,
"count": 1,
"self": 3.4000004234258085e-06
},
"TrainerController._save_models": {
"total": 0.29878590000225813,
"count": 1,
"self": 0.02720229999977164,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2715836000024865,
"count": 1,
"self": 0.2715836000024865
}
}
}
}
}
}
}