{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.7492804527282715,
"min": 1.6447088718414307,
"max": 3.2956857681274414,
"count": 1000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 35993.1953125,
"min": 16808.376953125,
"max": 282005.25,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 70.11267605633803,
"min": 39.46341463414634,
"max": 999.0,
"count": 1000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19912.0,
"min": 9512.0,
"max": 30716.0,
"count": 1000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1635.006448301862,
"min": 1193.8427735999428,
"max": 1670.0006921214163,
"count": 983
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 232170.91565886443,
"min": 2387.6855471998856,
"max": 397392.9560307429,
"count": 983
},
"SoccerTwos.Step.mean": {
"value": 9999926.0,
"min": 9102.0,
"max": 9999926.0,
"count": 1000
},
"SoccerTwos.Step.sum": {
"value": 9999926.0,
"min": 9102.0,
"max": 9999926.0,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.02269687131047249,
"min": -0.12980227172374725,
"max": 0.18211790919303894,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 3.200258731842041,
"min": -21.870420455932617,
"max": 28.41156578063965,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.02401832863688469,
"min": -0.13073505461215973,
"max": 0.1788453310728073,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 3.3865842819213867,
"min": -22.702056884765625,
"max": 28.13270378112793,
"count": 1000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.014286525706027417,
"min": -0.5833333333333334,
"max": 0.6135071414921965,
"count": 1000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 2.0144001245498657,
"min": -69.43879985809326,
"max": 54.608399748802185,
"count": 1000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.014286525706027417,
"min": -0.5833333333333334,
"max": 0.6135071414921965,
"count": 1000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 2.0144001245498657,
"min": -69.43879985809326,
"max": 54.608399748802185,
"count": 1000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017079885487570817,
"min": 0.009205440878092,
"max": 0.024769278972720108,
"count": 482
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017079885487570817,
"min": 0.009205440878092,
"max": 0.024769278972720108,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.08304048900802931,
"min": 0.0002604979153450889,
"max": 0.1260059749086698,
"count": 482
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.08304048900802931,
"min": 0.0002604979153450889,
"max": 0.1260059749086698,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.08436906014879544,
"min": 0.0002612599297814692,
"max": 0.13014639914035797,
"count": 482
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.08436906014879544,
"min": 0.0002612599297814692,
"max": 0.13014639914035797,
"count": 482
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 482
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 482
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 482
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 482
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 482
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1706690957",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\nilsr\\.conda\\envs\\mlagents\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --num-envs=3 --force",
"mlagents_version": "1.0.0",
"mlagents_envs_version": "1.0.0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.13.1+cu117",
"numpy_version": "1.23.3",
"end_time_seconds": "1706703645"
},
"total": 12689.3556593,
"count": 1,
"self": 0.518565899998066,
"children": {
"run_training.setup": {
"total": 0.2295180999999502,
"count": 1,
"self": 0.2295180999999502
},
"TrainerController.start_learning": {
"total": 12688.6075753,
"count": 1,
"self": 13.264583200225388,
"children": {
"TrainerController._reset_env": {
"total": 7.54071139999769,
"count": 50,
"self": 7.54071139999769
},
"TrainerController.advance": {
"total": 12667.593958599778,
"count": 492104,
"self": 13.851316800048153,
"children": {
"env_step": {
"total": 7208.466190699815,
"count": 492104,
"self": 1558.7075056997264,
"children": {
"SubprocessEnvManager._take_step": {
"total": 5642.456477399468,
"count": 699632,
"self": 95.584958599723,
"children": {
"TorchPolicy.evaluate": {
"total": 5546.871518799745,
"count": 1284461,
"self": 5546.871518799745
}
}
},
"workers": {
"total": 7.302207600620932,
"count": 492103,
"self": 0.0,
"children": {
"worker_root": {
"total": 38029.594289899025,
"count": 699557,
"is_parallel": true,
"self": 31193.259923798723,
"children": {
"steps_from_proto": {
"total": 0.2638703000006899,
"count": 300,
"is_parallel": true,
"self": 0.05720620000101917,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.20666409999967073,
"count": 1200,
"is_parallel": true,
"self": 0.20666409999967073
}
}
},
"UnityEnvironment.step": {
"total": 6836.070495800299,
"count": 699557,
"is_parallel": true,
"self": 375.15103440008716,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 268.7900258992204,
"count": 699557,
"is_parallel": true,
"self": 268.7900258992204
},
"communicator.exchange": {
"total": 5020.891360600281,
"count": 699557,
"is_parallel": true,
"self": 5020.891360600281
},
"steps_from_proto": {
"total": 1171.2380749007102,
"count": 1399114,
"is_parallel": true,
"self": 258.60284860274123,
"children": {
"_process_rank_one_or_two_observation": {
"total": 912.635226297969,
"count": 5596456,
"is_parallel": true,
"self": 912.635226297969
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 5445.276451099915,
"count": 492103,
"self": 107.08344929937812,
"children": {
"process_trajectory": {
"total": 2680.497412800558,
"count": 492103,
"self": 2676.6741822005606,
"children": {
"RLTrainer._checkpoint": {
"total": 3.8232305999972596,
"count": 20,
"self": 3.8232305999972596
}
}
},
"_update_policy": {
"total": 2657.6955889999795,
"count": 482,
"self": 1410.2727498999739,
"children": {
"TorchPOCAOptimizer.update": {
"total": 1247.4228391000056,
"count": 14469,
"self": 1247.4228391000056
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2999989849049598e-06,
"count": 1,
"self": 1.2999989849049598e-06
},
"TrainerController._save_models": {
"total": 0.20832080000036513,
"count": 1,
"self": 0.006607700001040939,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20171309999932419,
"count": 1,
"self": 0.20171309999932419
}
}
}
}
}
}
}