{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.4086549282073975,
"min": 1.268043875694275,
"max": 3.295724630355835,
"count": 5000
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 28759.099609375,
"min": 21151.21484375,
"max": 111021.765625,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 79.79032258064517,
"min": 41.63247863247863,
"max": 999.0,
"count": 5000
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19788.0,
"min": 9664.0,
"max": 31244.0,
"count": 5000
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1829.12120939836,
"min": 1191.1156991376893,
"max": 1906.1469234347953,
"count": 4994
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 226811.02996539662,
"min": 2383.338683115511,
"max": 430477.6972377616,
"count": 4994
},
"SoccerTwos.Step.mean": {
"value": 49999945.0,
"min": 9638.0,
"max": 49999945.0,
"count": 5000
},
"SoccerTwos.Step.sum": {
"value": 49999945.0,
"min": 9638.0,
"max": 49999945.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.03521855175495148,
"min": -0.165052130818367,
"max": 0.23691850900650024,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -4.367100238800049,
"min": -26.408340454101562,
"max": 35.004268646240234,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.034728504717350006,
"min": -0.1632629930973053,
"max": 0.23600859940052032,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -4.306334495544434,
"min": -26.122079849243164,
"max": 33.70362091064453,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.1485548355887013,
"min": -0.638063160996688,
"max": 0.43167058860554414,
"count": 5000
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -18.420799612998962,
"min": -66.41400015354156,
"max": 64.83860021829605,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.1485548355887013,
"min": -0.638063160996688,
"max": 0.43167058860554414,
"count": 5000
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -18.420799612998962,
"min": -66.41400015354156,
"max": 64.83860021829605,
"count": 5000
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 5000
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.019478699524188414,
"min": 0.00937927095995595,
"max": 0.025816674375285707,
"count": 2425
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.019478699524188414,
"min": 0.00937927095995595,
"max": 0.025816674375285707,
"count": 2425
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.08596547668178876,
"min": 0.00021378911915235222,
"max": 0.12492119148373604,
"count": 2425
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.08596547668178876,
"min": 0.00021378911915235222,
"max": 0.12492119148373604,
"count": 2425
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.08699496537446975,
"min": 0.00021644697577964203,
"max": 0.12640473643938702,
"count": 2425
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.08699496537446975,
"min": 0.00021644697577964203,
"max": 0.12640473643938702,
"count": 2425
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2425
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 2425
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 2425
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000004,
"max": 0.20000000000000007,
"count": 2425
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2425
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 2425
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1698650611",
"python_version": "3.10.10 | packaged by Anaconda, Inc. | (main, Mar 21 2023, 18:39:17) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\Ignacio\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn .\\config\\poca\\SoccerTwos.yaml --env=.\\training-envs-executables\\SoccerTwos.exe --run-id=ScoeerTwos --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.0+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1698743886"
},
"total": 93274.29332550001,
"count": 1,
"self": 0.27556440002808813,
"children": {
"run_training.setup": {
"total": 0.07127410000248346,
"count": 1,
"self": 0.07127410000248346
},
"TrainerController.start_learning": {
"total": 93273.94648699998,
"count": 1,
"self": 58.83607880471391,
"children": {
"TrainerController._reset_env": {
"total": 8.17233990020759,
"count": 250,
"self": 8.17233990020759
},
"TrainerController.advance": {
"total": 93206.82897949505,
"count": 3448391,
"self": 52.582258994531,
"children": {
"env_step": {
"total": 39581.76548850494,
"count": 3448391,
"self": 30913.340164013513,
"children": {
"SubprocessEnvManager._take_step": {
"total": 8633.625579697109,
"count": 3448391,
"self": 321.80235908659233,
"children": {
"TorchPolicy.evaluate": {
"total": 8311.823220610517,
"count": 6280544,
"self": 8311.823220610517
}
}
},
"workers": {
"total": 34.79974479431985,
"count": 3448391,
"self": 0.0,
"children": {
"worker_root": {
"total": 93202.42310158622,
"count": 3448391,
"is_parallel": true,
"self": 68869.10959709574,
"children": {
"steps_from_proto": {
"total": 0.29738700023153797,
"count": 500,
"is_parallel": true,
"self": 0.06373980031639803,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.23364719991513994,
"count": 2000,
"is_parallel": true,
"self": 0.23364719991513994
}
}
},
"UnityEnvironment.step": {
"total": 24333.016117490246,
"count": 3448391,
"is_parallel": true,
"self": 1171.1086369622499,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 997.1881673770258,
"count": 3448391,
"is_parallel": true,
"self": 997.1881673770258
},
"communicator.exchange": {
"total": 18612.42163566519,
"count": 3448391,
"is_parallel": true,
"self": 18612.42163566519
},
"steps_from_proto": {
"total": 3552.2976774857816,
"count": 6896782,
"is_parallel": true,
"self": 734.322668517023,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2817.9750089687586,
"count": 27587128,
"is_parallel": true,
"self": 2817.9750089687586
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 53572.48123199558,
"count": 3448391,
"self": 498.42538041890657,
"children": {
"process_trajectory": {
"total": 10664.200104576332,
"count": 3448391,
"self": 10653.466261676323,
"children": {
"RLTrainer._checkpoint": {
"total": 10.733842900008312,
"count": 100,
"self": 10.733842900008312
}
}
},
"_update_policy": {
"total": 42409.85574700034,
"count": 2425,
"self": 6294.645024598241,
"children": {
"TorchPOCAOptimizer.update": {
"total": 36115.2107224021,
"count": 72759,
"self": 36115.2107224021
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.00006091594696e-07,
"count": 1,
"self": 8.00006091594696e-07
},
"TrainerController._save_models": {
"total": 0.10908799999742769,
"count": 1,
"self": 0.004449700005352497,
"children": {
"RLTrainer._checkpoint": {
"total": 0.10463829999207519,
"count": 1,
"self": 0.10463829999207519
}
}
}
}
}
}
}
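
The JSON above is the ML-Agents run summary for this SoccerTwos training run: the "gauges" block holds the tracked training statistics (value plus min/max/count), and the remaining top-level keys form a hierarchical wall-clock timer tree. Below is a minimal sketch of how such a file could be inspected, assuming it is saved locally as run_logs/timers.json (the filename and path are assumptions; they are not stated in the JSON itself).

import json

# Assumed local path; mlagents-learn typically writes this file under
# results/<run-id>/run_logs/timers.json (path not recorded in the JSON).
with open("run_logs/timers.json", "r", encoding="utf-8") as f:
    summary = json.load(f)

# "gauges" maps each stat name to its final value and the min/max/count seen.
gauges = summary["gauges"]
for name in ("SoccerTwos.Self-play.ELO.mean",
             "SoccerTwos.Environment.EpisodeLength.mean",
             "SoccerTwos.Step.mean"):
    g = gauges[name]
    print(f"{name}: value={g['value']:.3f} "
          f"(min={g['min']:.3f}, max={g['max']:.3f}, count={g['count']})")

# The rest of the file is a timer tree: every node carries total/count/self
# seconds plus optional children. Walking it shows where training time went
# (env_step, communicator.exchange, TorchPOCAOptimizer.update, ...).
def walk(node, name="root", depth=0):
    print(f"{'  ' * depth}{name}: "
          f"{node.get('total', 0.0):.1f} s over {node.get('count', 0)} calls")
    for child_name, child in node.get("children", {}).items():
        walk(child, child_name, depth + 1)

walk(summary)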