First Push (commit f0cec3d)
{
  "name": "root",
  "gauges": {
    "SoccerTwos.Policy.Entropy.mean": {
      "value": 1.4295754432678223,
      "min": 1.4295754432678223,
      "max": 3.2956483364105225,
      "count": 2204
    },
    "SoccerTwos.Policy.Entropy.sum": {
      "value": 27722.328125,
      "min": 18316.36328125,
      "max": 120985.15625,
      "count": 2204
    },
    "SoccerTwos.Environment.EpisodeLength.mean": {
      "value": 50.04210526315789,
      "min": 37.689922480620154,
      "max": 999.0,
      "count": 2204
    },
    "SoccerTwos.Environment.EpisodeLength.sum": {
      "value": 19016.0,
      "min": 4676.0,
      "max": 30216.0,
      "count": 2204
    },
    "SoccerTwos.Self-play.ELO.mean": {
      "value": 1670.8702607693115,
      "min": 1200.2269656552999,
      "max": 1670.8702607693115,
      "count": 2202
    },
    "SoccerTwos.Self-play.ELO.sum": {
      "value": 317465.34954616916,
      "min": 2401.4985608882907,
      "max": 409782.57697789,
      "count": 2202
    },
    "SoccerTwos.Step.mean": {
      "value": 22039986.0,
      "min": 9348.0,
      "max": 22039986.0,
      "count": 2204
    },
    "SoccerTwos.Step.sum": {
      "value": 22039986.0,
      "min": 9348.0,
      "max": 22039986.0,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
      "value": 0.03447429835796356,
      "min": -0.14727379381656647,
      "max": 0.15868721902370453,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
      "value": 6.584590911865234,
      "min": -26.709228515625,
      "max": 31.771392822265625,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
      "value": 0.03542308136820793,
      "min": -0.14488007128238678,
      "max": 0.16164880990982056,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
      "value": 6.765808582305908,
      "min": -26.293500900268555,
      "max": 30.984846115112305,
      "count": 2204
    },
    "SoccerTwos.Environment.CumulativeReward.mean": {
      "value": 0.0,
      "min": 0.0,
      "max": 0.0,
      "count": 2204
    },
    "SoccerTwos.Environment.CumulativeReward.sum": {
      "value": 0.0,
      "min": 0.0,
      "max": 0.0,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicReward.mean": {
      "value": 0.09918952799592343,
      "min": -0.676254543391141,
      "max": 0.44998596425642046,
      "count": 2204
    },
    "SoccerTwos.Policy.ExtrinsicReward.sum": {
      "value": 18.945199847221375,
      "min": -67.1159999370575,
      "max": 58.53219944238663,
      "count": 2204
    },
    "SoccerTwos.Environment.GroupCumulativeReward.mean": {
      "value": 0.09918952799592343,
      "min": -0.676254543391141,
      "max": 0.44998596425642046,
      "count": 2204
    },
    "SoccerTwos.Environment.GroupCumulativeReward.sum": {
      "value": 18.945199847221375,
      "min": -67.1159999370575,
      "max": 58.53219944238663,
      "count": 2204
    },
    "SoccerTwos.IsTraining.mean": {
      "value": 1.0,
      "min": 1.0,
      "max": 1.0,
      "count": 2204
    },
    "SoccerTwos.IsTraining.sum": {
      "value": 1.0,
      "min": 1.0,
      "max": 1.0,
      "count": 2204
    },
    "SoccerTwos.Losses.PolicyLoss.mean": {
      "value": 0.017875805279860893,
      "min": 0.010085510737068641,
      "max": 0.025645279930904506,
      "count": 1068
    },
    "SoccerTwos.Losses.PolicyLoss.sum": {
      "value": 0.017875805279860893,
      "min": 0.010085510737068641,
      "max": 0.025645279930904506,
      "count": 1068
    },
    "SoccerTwos.Losses.ValueLoss.mean": {
      "value": 0.10840815007686615,
      "min": 0.0006131869527356078,
      "max": 0.1383328417936961,
      "count": 1068
    },
    "SoccerTwos.Losses.ValueLoss.sum": {
      "value": 0.10840815007686615,
      "min": 0.0006131869527356078,
      "max": 0.1383328417936961,
      "count": 1068
    },
    "SoccerTwos.Losses.BaselineLoss.mean": {
      "value": 0.10949653362234434,
      "min": 0.000615147494439346,
      "max": 0.14097957462072372,
      "count": 1068
    },
    "SoccerTwos.Losses.BaselineLoss.sum": {
      "value": 0.10949653362234434,
      "min": 0.000615147494439346,
      "max": 0.14097957462072372,
      "count": 1068
    },
    "SoccerTwos.Policy.LearningRate.mean": {
      "value": 0.0003,
      "min": 0.0003,
      "max": 0.0003,
      "count": 1068
    },
    "SoccerTwos.Policy.LearningRate.sum": {
      "value": 0.0003,
      "min": 0.0003,
      "max": 0.0003,
      "count": 1068
    },
    "SoccerTwos.Policy.Epsilon.mean": {
      "value": 0.20000000000000007,
      "min": 0.19999999999999996,
      "max": 0.20000000000000007,
      "count": 1068
    },
    "SoccerTwos.Policy.Epsilon.sum": {
      "value": 0.20000000000000007,
      "min": 0.19999999999999996,
      "max": 0.20000000000000007,
      "count": 1068
    },
    "SoccerTwos.Policy.Beta.mean": {
      "value": 0.005000000000000001,
      "min": 0.005,
      "max": 0.005000000000000001,
      "count": 1068
    },
    "SoccerTwos.Policy.Beta.sum": {
      "value": 0.005000000000000001,
      "min": 0.005,
      "max": 0.005000000000000001,
      "count": 1068
    }
  },
  "metadata": {
    "timer_format_version": "0.1.0",
    "start_time_seconds": "1696966922",
    "python_version": "3.9.18 (main, Sep 11 2023, 13:41:44) \n[GCC 11.2.0]",
    "command_line_arguments": "/home/andre/miniconda3/envs/rl/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.x86_64 --run-id=SoccerTwos3 --no-graphics",
    "mlagents_version": "0.30.0",
    "mlagents_envs_version": "0.30.0",
    "communication_protocol_version": "1.5.0",
    "pytorch_version": "1.11.0+cu102",
    "numpy_version": "1.21.2",
    "end_time_seconds": "1697031305"
  },
  "total": 64383.68118217809,
  "count": 1,
  "self": 1.2398065680172294,
  "children": {
    "run_training.setup": {
      "total": 0.016844510100781918,
      "count": 1,
      "self": 0.016844510100781918
    },
    "TrainerController.start_learning": {
      "total": 64382.42453109997,
      "count": 1,
      "self": 38.92193431360647,
      "children": {
        "TrainerController._reset_env": {
          "total": 3.307890849071555,
          "count": 111,
          "self": 3.307890849071555
        },
        "TrainerController.advance": {
          "total": 64339.5550213462,
          "count": 1520423,
          "self": 40.8708538900828,
          "children": {
            "env_step": {
              "total": 36604.495503323735,
              "count": 1520423,
              "self": 32750.119099378004,
              "children": {
                "SubprocessEnvManager._take_step": {
                  "total": 3830.9975482781883,
                  "count": 1520423,
                  "self": 196.1993643681053,
                  "children": {
                    "TorchPolicy.evaluate": {
                      "total": 3634.798183910083,
                      "count": 2768644,
                      "self": 3634.798183910083
                    }
                  }
                },
                "workers": {
                  "total": 23.378855667542666,
                  "count": 1520423,
                  "self": 0.0,
                  "children": {
                    "worker_root": {
                      "total": 64228.85714706173,
                      "count": 1520423,
                      "is_parallel": true,
                      "self": 35179.90832429123,
                      "children": {
                        "run_training.setup": {
                          "total": 0.0,
                          "count": 0,
                          "is_parallel": true,
                          "self": 0.0,
                          "children": {
                            "steps_from_proto": {
                              "total": 0.0028462449554353952,
                              "count": 2,
                              "is_parallel": true,
                              "self": 0.0007436119485646486,
                              "children": {
                                "_process_rank_one_or_two_observation": {
                                  "total": 0.0021026330068707466,
                                  "count": 8,
                                  "is_parallel": true,
                                  "self": 0.0021026330068707466
                                }
                              }
                            },
                            "UnityEnvironment.step": {
                              "total": 0.03850793093442917,
                              "count": 1,
                              "is_parallel": true,
                              "self": 0.0005843029357492924,
                              "children": {
                                "UnityEnvironment._generate_step_input": {
                                  "total": 0.0005270749097689986,
                                  "count": 1,
                                  "is_parallel": true,
                                  "self": 0.0005270749097689986
                                },
                                "communicator.exchange": {
                                  "total": 0.0357780740596354,
                                  "count": 1,
                                  "is_parallel": true,
                                  "self": 0.0357780740596354
                                },
                                "steps_from_proto": {
                                  "total": 0.001618479029275477,
                                  "count": 2,
                                  "is_parallel": true,
                                  "self": 0.00032234378159046173,
                                  "children": {
                                    "_process_rank_one_or_two_observation": {
                                      "total": 0.0012961352476850152,
                                      "count": 8,
                                      "is_parallel": true,
                                      "self": 0.0012961352476850152
                                    }
                                  }
                                }
                              }
                            }
                          }
                        },
                        "UnityEnvironment.step": {
                          "total": 29048.779247753904,
                          "count": 1520422,
                          "is_parallel": true,
                          "self": 830.8873350732028,
                          "children": {
                            "UnityEnvironment._generate_step_input": {
                              "total": 528.2550250960048,
                              "count": 1520422,
                              "is_parallel": true,
                              "self": 528.2550250960048
                            },
                            "communicator.exchange": {
                              "total": 25503.690678836778,
                              "count": 1520422,
                              "is_parallel": true,
                              "self": 25503.690678836778
                            },
                            "steps_from_proto": {
                              "total": 2185.9462087479187,
                              "count": 3040844,
                              "is_parallel": true,
                              "self": 424.74704040167853,
                              "children": {
                                "_process_rank_one_or_two_observation": {
                                  "total": 1761.1991683462402,
                                  "count": 12163376,
                                  "is_parallel": true,
                                  "self": 1761.1991683462402
                                }
                              }
                            }
                          }
                        },
                        "steps_from_proto": {
                          "total": 0.16957501659635454,
                          "count": 220,
                          "is_parallel": true,
                          "self": 0.03266283276025206,
                          "children": {
                            "_process_rank_one_or_two_observation": {
                              "total": 0.13691218383610249,
                              "count": 880,
                              "is_parallel": true,
                              "self": 0.13691218383610249
                            }
                          }
                        }
                      }
                    }
                  }
                }
              }
            },
            "trainer_advance": {
              "total": 27694.18866413238,
              "count": 1520423,
              "self": 236.6728832940571,
              "children": {
                "process_trajectory": {
                  "total": 4355.0125788023,
                  "count": 1520423,
                  "self": 4347.310795673635,
                  "children": {
                    "RLTrainer._checkpoint": {
                      "total": 7.701783128664829,
                      "count": 44,
                      "self": 7.701783128664829
                    }
                  }
                },
                "_update_policy": {
                  "total": 23102.503202036023,
                  "count": 1069,
                  "self": 2478.9786384809995,
                  "children": {
                    "TorchPOCAOptimizer.update": {
                      "total": 20623.524563555024,
                      "count": 32083,
                      "self": 20623.524563555024
                    }
                  }
                }
              }
            }
          }
        },
        "trainer_threads": {
          "total": 5.912035703659058e-06,
          "count": 1,
          "self": 5.912035703659058e-06
        },
        "TrainerController._save_models": {
          "total": 0.639678679057397,
          "count": 1,
          "self": 0.010856325970962644,
          "children": {
            "RLTrainer._checkpoint": {
              "total": 0.6288223530864343,
              "count": 1,
              "self": 0.6288223530864343
            }
          }
        }
      }
    }
  }
}
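
For quick inspection, a minimal sketch of how the gauges recorded above could be read back with Python's standard json module. It assumes the JSON has been saved as run_logs/timers.json (ML-Agents writes this summary under the results/<run-id> folder); the chosen path and the metrics printed here are illustrative, not part of the training run itself.

```python
import json

# Assumed path: adjust to wherever this timers.json lives on disk.
with open("run_logs/timers.json") as f:
    timers = json.load(f)

gauges = timers["gauges"]

# Each gauge stores the most recent value together with its running min/max/count.
elo = gauges["SoccerTwos.Self-play.ELO.mean"]
steps = gauges["SoccerTwos.Step.mean"]["value"]

print(f"Steps trained: {steps:,.0f}")
print(f"Final self-play ELO: {elo['value']:.1f} (min {elo['min']:.1f}, max {elo['max']:.1f})")
```

With the values above, this would report roughly 22,039,986 steps and a final ELO of about 1670.9.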