poca-SoccerTwos / run_logs / timers.json
EladAssia's picture
First Push
60903e9
raw
history blame
15.7 kB
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.6622945070266724,
"min": 1.4777714014053345,
"max": 2.3602654933929443,
"count": 1792
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 32660.76171875,
"min": 26372.533203125,
"max": 52087.3359375,
"count": 1792
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 69.5774647887324,
"min": 43.214285714285715,
"max": 135.5142857142857,
"count": 1792
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19760.0,
"min": 11968.0,
"max": 21432.0,
"count": 1792
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1429.6834391114112,
"min": 1267.8483300636103,
"max": 1442.8017261441944,
"count": 1792
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 203015.0483538204,
"min": 90404.93167926311,
"max": 304220.6827741176,
"count": 1792
},
"SoccerTwos.Step.mean": {
"value": 20109806.0,
"min": 2199998.0,
"max": 20109806.0,
"count": 1792
},
"SoccerTwos.Step.sum": {
"value": 20109806.0,
"min": 2199998.0,
"max": 20109806.0,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.04578428715467453,
"min": -0.13799507915973663,
"max": 0.09785564988851547,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -6.501368522644043,
"min": -26.63304901123047,
"max": 17.735788345336914,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.04887549951672554,
"min": -0.13961166143417358,
"max": 0.09776446223258972,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -6.94032096862793,
"min": -26.945051193237305,
"max": 17.878185272216797,
"count": 1792
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1792
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.12329718176747712,
"min": -0.41600384563207626,
"max": 0.36480800008773806,
"count": 1792
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -17.50819981098175,
"min": -52.78160011768341,
"max": 45.601000010967255,
"count": 1792
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.12329718176747712,
"min": -0.41600384563207626,
"max": 0.36480800008773806,
"count": 1792
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -17.50819981098175,
"min": -52.78160011768341,
"max": 45.601000010967255,
"count": 1792
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1792
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1792
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.012768109767542531,
"min": 0.010367057458400572,
"max": 0.02548085546004586,
"count": 869
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.012768109767542531,
"min": 0.010367057458400572,
"max": 0.02548085546004586,
"count": 869
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.09177817876140276,
"min": 0.05979832634329796,
"max": 0.12440450837214788,
"count": 869
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.09177817876140276,
"min": 0.05979832634329796,
"max": 0.12440450837214788,
"count": 869
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.09319816902279854,
"min": 0.061159982532262805,
"max": 0.12736019790172576,
"count": 869
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.09319816902279854,
"min": 0.061159982532262805,
"max": 0.12736019790172576,
"count": 869
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 869
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 869
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 869
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 869
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 869
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 869
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1696277873",
"python_version": "3.9.18 (main, Sep 11 2023, 14:09:26) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\assia\\Anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.1+cpu",
"numpy_version": "1.21.2",
"end_time_seconds": "1696365061"
},
"total": 87188.3295095,
"count": 1,
"self": 2.304758000012953,
"children": {
"run_training.setup": {
"total": 0.11412949999999977,
"count": 1,
"self": 0.11412949999999977
},
"TrainerController.start_learning": {
"total": 87185.910622,
"count": 1,
"self": 42.97262080095243,
"children": {
"TrainerController._reset_env": {
"total": 11.842777299992319,
"count": 359,
"self": 11.842777299992319
},
"TrainerController.advance": {
"total": 87130.83902779905,
"count": 1249186,
"self": 43.913307196169626,
"children": {
"env_step": {
"total": 34529.831184197974,
"count": 1249186,
"self": 27557.149798199607,
"children": {
"SubprocessEnvManager._take_step": {
"total": 6945.387012902138,
"count": 1249186,
"self": 221.06731429970478,
"children": {
"TorchPolicy.evaluate": {
"total": 6724.319698602433,
"count": 2277496,
"self": 6724.319698602433
}
}
},
"workers": {
"total": 27.29437309622666,
"count": 1249186,
"self": 0.0,
"children": {
"worker_root": {
"total": 87058.09205769956,
"count": 1249186,
"is_parallel": true,
"self": 64241.860530789745,
"children": {
"steps_from_proto": {
"total": 0.9765216000583141,
"count": 718,
"is_parallel": true,
"self": 0.18875550028448984,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.7877660997738243,
"count": 2872,
"is_parallel": true,
"self": 0.7877660997738243
}
}
},
"UnityEnvironment.step": {
"total": 22815.25500530975,
"count": 1249186,
"is_parallel": true,
"self": 1124.5716741985998,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 765.9207953082126,
"count": 1249186,
"is_parallel": true,
"self": 765.9207953082126
},
"communicator.exchange": {
"total": 17210.207485297262,
"count": 1249186,
"is_parallel": true,
"self": 17210.207485297262
},
"steps_from_proto": {
"total": 3714.555050505679,
"count": 2498372,
"is_parallel": true,
"self": 720.1168485956996,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2994.4382019099794,
"count": 9993488,
"is_parallel": true,
"self": 2994.4382019099794
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 52557.094536404904,
"count": 1249186,
"self": 286.5318229020704,
"children": {
"process_trajectory": {
"total": 6838.996242502752,
"count": 1249186,
"self": 6831.211235102757,
"children": {
"RLTrainer._checkpoint": {
"total": 7.7850073999945835,
"count": 36,
"self": 7.7850073999945835
}
}
},
"_update_policy": {
"total": 45431.56647100008,
"count": 870,
"self": 2983.052892099724,
"children": {
"TorchPOCAOptimizer.update": {
"total": 42448.513578900354,
"count": 26096,
"self": 42448.513578900354
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3999961083754897e-06,
"count": 1,
"self": 1.3999961083754897e-06
},
"TrainerController._save_models": {
"total": 0.2561947000067448,
"count": 1,
"self": 0.010259700007736683,
"children": {
"RLTrainer._checkpoint": {
"total": 0.24593499999900814,
"count": 1,
"self": 0.24593499999900814
}
}
}
}
}
}
}