{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.840072512626648,
"min": 1.5899217128753662,
"max": 2.4978463649749756,
"count": 761
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 37390.2734375,
"min": 12050.388671875,
"max": 53777.86328125,
"count": 761
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 59.142857142857146,
"min": 38.196850393700785,
"max": 91.75,
"count": 761
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19872.0,
"min": 3236.0,
"max": 21184.0,
"count": 761
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1529.7088135388522,
"min": 1422.9264545281249,
"max": 1573.7982628040145,
"count": 761
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 256991.08067452715,
"min": 42878.86507428949,
"max": 382782.2583887068,
"count": 761
},
"SoccerTwos.Step.mean": {
"value": 10029872.0,
"min": 2429979.0,
"max": 10029872.0,
"count": 761
},
"SoccerTwos.Step.sum": {
"value": 10029872.0,
"min": 2429979.0,
"max": 10029872.0,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.031066184863448143,
"min": -0.1180698424577713,
"max": 0.1087309867143631,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -5.219119071960449,
"min": -21.02879524230957,
"max": 19.492877960205078,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.035260315984487534,
"min": -0.11848477274179459,
"max": 0.1105712503194809,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -5.923733234405518,
"min": -20.94308090209961,
"max": 19.144439697265625,
"count": 761
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 761
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.2327690479301271,
"min": -0.32567314182009016,
"max": 0.32442905250208337,
"count": 761
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -39.10520005226135,
"min": -57.35360014438629,
"max": 58.072800397872925,
"count": 761
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.2327690479301271,
"min": -0.32567314182009016,
"max": 0.32442905250208337,
"count": 761
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -39.10520005226135,
"min": -57.35360014438629,
"max": 58.072800397872925,
"count": 761
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 761
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 761
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.017179827320311838,
"min": 0.010565199363675977,
"max": 0.02532601624261588,
"count": 369
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.017179827320311838,
"min": 0.010565199363675977,
"max": 0.02532601624261588,
"count": 369
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0999237209558487,
"min": 0.07022709188361963,
"max": 0.12925801649689675,
"count": 369
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0999237209558487,
"min": 0.07022709188361963,
"max": 0.12925801649689675,
"count": 369
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10240304668744406,
"min": 0.07139496964712938,
"max": 0.13219926357269288,
"count": 369
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10240304668744406,
"min": 0.07139496964712938,
"max": 0.13219926357269288,
"count": 369
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 369
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 369
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 369
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 369
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 369
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 369
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1691417556",
"python_version": "3.8.17 (default, Jul 5 2023, 20:44:21) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\nokot\\miniconda3\\envs\\ml-agents\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.30.0",
"mlagents_envs_version": "0.30.0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.7.1+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1691501578"
},
"total": 84023.94156210001,
"count": 1,
"self": 0.33657540001149755,
"children": {
"run_training.setup": {
"total": 0.15175470000000013,
"count": 1,
"self": 0.15175470000000013
},
"TrainerController.start_learning": {
"total": 84023.453232,
"count": 1,
"self": 30.030664897349197,
"children": {
"TrainerController._reset_env": {
"total": 10.714601099975752,
"count": 39,
"self": 10.714601099975752
},
"TrainerController.advance": {
"total": 83982.26639350268,
"count": 527687,
"self": 31.88441820141452,
"children": {
"env_step": {
"total": 21282.278971898933,
"count": 527687,
"self": 16135.11157879619,
"children": {
"SubprocessEnvManager._take_step": {
"total": 5127.16464080181,
"count": 527687,
"self": 177.39852979446914,
"children": {
"TorchPolicy.evaluate": {
"total": 4949.76611100734,
"count": 954018,
"self": 4949.76611100734
}
}
},
"workers": {
"total": 20.002752300934617,
"count": 527687,
"self": 0.0,
"children": {
"worker_root": {
"total": 83969.881665199,
"count": 527687,
"is_parallel": true,
"self": 71320.51937559372,
"children": {
"steps_from_proto": {
"total": 0.19051710006376155,
"count": 78,
"is_parallel": true,
"self": 0.04021390001721237,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.15030320004654918,
"count": 312,
"is_parallel": true,
"self": 0.15030320004654918
}
}
},
"UnityEnvironment.step": {
"total": 12649.171772505219,
"count": 527687,
"is_parallel": true,
"self": 709.8374647020391,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 705.6238761024485,
"count": 527687,
"is_parallel": true,
"self": 705.6238761024485
},
"communicator.exchange": {
"total": 8715.422255598512,
"count": 527687,
"is_parallel": true,
"self": 8715.422255598512
},
"steps_from_proto": {
"total": 2518.2881761022186,
"count": 1055374,
"is_parallel": true,
"self": 544.6141842024838,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1973.6739918997348,
"count": 4221496,
"is_parallel": true,
"self": 1973.6739918997348
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 62668.10300340233,
"count": 527687,
"self": 174.5810705012409,
"children": {
"process_trajectory": {
"total": 23944.859248801124,
"count": 527687,
"self": 23938.519073401123,
"children": {
"RLTrainer._checkpoint": {
"total": 6.340175399999907,
"count": 16,
"self": 6.340175399999907
}
}
},
"_update_policy": {
"total": 38548.66268409997,
"count": 369,
"self": 2733.2539645000797,
"children": {
"TorchPOCAOptimizer.update": {
"total": 35815.40871959989,
"count": 11070,
"self": 35815.40871959989
}
}
}
}
}
}
},
"trainer_threads": {
"total": 2.7999922167509794e-06,
"count": 1,
"self": 2.7999922167509794e-06
},
"TrainerController._save_models": {
"total": 0.44156970000767615,
"count": 1,
"self": 0.01958220000960864,
"children": {
"RLTrainer._checkpoint": {
"total": 0.4219874999980675,
"count": 1,
"self": 0.4219874999980675
}
}
}
}
}
}
}