poca-SoccerTwos / run_logs / timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 3.2742669582366943,
"min": 3.2742669582366943,
"max": 3.2957334518432617,
"count": 10
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 70724.1640625,
"min": 31687.91015625,
"max": 105463.46875,
"count": 10
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 769.0,
"min": 511.2857142857143,
"max": 999.0,
"count": 10
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 18456.0,
"min": 13296.0,
"max": 26752.0,
"count": 10
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1196.1531254110819,
"min": 1194.8489699455254,
"max": 1198.7535925691843,
"count": 9
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 4784.6125016443275,
"min": 2390.1285157180587,
"max": 14353.995970830434,
"count": 9
},
"SoccerTwos.Step.mean": {
"value": 99470.0,
"min": 9392.0,
"max": 99470.0,
"count": 10
},
"SoccerTwos.Step.sum": {
"value": 99470.0,
"min": 9392.0,
"max": 99470.0,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.02711886540055275,
"min": 0.026341447606682777,
"max": 0.07637666165828705,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 0.35254526138305664,
"min": 0.3221576511859894,
"max": 1.2220265865325928,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.028054507449269295,
"min": 0.024852896109223366,
"max": 0.07641187310218811,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": 0.3647086024284363,
"min": 0.3166077733039856,
"max": 1.2225899696350098,
"count": 10
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 10
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.07027692061204177,
"min": -0.3333333333333333,
"max": 0.10254545645280318,
"count": 10
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -0.913599967956543,
"min": -4.0,
"max": 1.128000020980835,
"count": 10
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.07027692061204177,
"min": -0.3333333333333333,
"max": 0.10254545645280318,
"count": 10
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -0.913599967956543,
"min": -4.0,
"max": 1.128000020980835,
"count": 10
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 10
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 10
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.018664041239147385,
"min": 0.01638758342790728,
"max": 0.019611839847074763,
"count": 4
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.018664041239147385,
"min": 0.01638758342790728,
"max": 0.019611839847074763,
"count": 4
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.0016870414430741221,
"min": 0.0003547371425983411,
"max": 0.006974872729430596,
"count": 4
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.0016870414430741221,
"min": 0.0003547371425983411,
"max": 0.006974872729430596,
"count": 4
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.0017082204828814914,
"min": 0.0003368156564344342,
"max": 0.006454419515406092,
"count": 4
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.0017082204828814914,
"min": 0.0003368156564344342,
"max": 0.006454419515406092,
"count": 4
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 4
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 4
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 4
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.2,
"max": 0.20000000000000007,
"count": 4
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 4
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 4
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1679183301",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/linux/SoccerTwos.x86_64 --force --run-id=SoccerTwos --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1679183878"
},
"total": 577.3796814949999,
"count": 1,
"self": 0.5908887079999658,
"children": {
"run_training.setup": {
"total": 0.23541098400005467,
"count": 1,
"self": 0.23541098400005467
},
"TrainerController.start_learning": {
"total": 576.5533818029999,
"count": 1,
"self": 0.2583465830093701,
"children": {
"TrainerController._reset_env": {
"total": 4.048359729999902,
"count": 1,
"self": 4.048359729999902
},
"TrainerController.advance": {
"total": 571.9471983179906,
"count": 6698,
"self": 0.3294751819823887,
"children": {
"env_step": {
"total": 237.9910622950017,
"count": 6698,
"self": 194.1527566540069,
"children": {
"SubprocessEnvManager._take_step": {
"total": 43.66683541899806,
"count": 6698,
"self": 2.1031760379812567,
"children": {
"TorchPolicy.evaluate": {
"total": 41.563659381016805,
"count": 13330,
"self": 41.563659381016805
}
}
},
"workers": {
"total": 0.1714702219967421,
"count": 6698,
"self": 0.0,
"children": {
"worker_root": {
"total": 574.9202399819994,
"count": 6698,
"is_parallel": true,
"self": 413.53883929999733,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.003467670999953043,
"count": 2,
"is_parallel": true,
"self": 0.0009530439999707596,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0025146269999822835,
"count": 8,
"is_parallel": true,
"self": 0.0025146269999822835
}
}
},
"UnityEnvironment.step": {
"total": 0.048476614000037443,
"count": 1,
"is_parallel": true,
"self": 0.001243397000052937,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.000938368999868544,
"count": 1,
"is_parallel": true,
"self": 0.000938368999868544
},
"communicator.exchange": {
"total": 0.0423584480001864,
"count": 1,
"is_parallel": true,
"self": 0.0423584480001864
},
"steps_from_proto": {
"total": 0.003936399999929563,
"count": 2,
"is_parallel": true,
"self": 0.0007509889996981656,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0031854110002313973,
"count": 8,
"is_parallel": true,
"self": 0.0031854110002313973
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 161.3814006820021,
"count": 6697,
"is_parallel": true,
"self": 8.919849462980665,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 5.479749618011738,
"count": 6697,
"is_parallel": true,
"self": 5.479749618011738
},
"communicator.exchange": {
"total": 118.77992843600305,
"count": 6697,
"is_parallel": true,
"self": 118.77992843600305
},
"steps_from_proto": {
"total": 28.201873165006646,
"count": 13394,
"is_parallel": true,
"self": 5.430200891993991,
"children": {
"_process_rank_one_or_two_observation": {
"total": 22.771672273012655,
"count": 53576,
"is_parallel": true,
"self": 22.771672273012655
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 333.62666084100647,
"count": 6698,
"self": 2.1063119210039076,
"children": {
"process_trajectory": {
"total": 43.91635050900254,
"count": 6698,
"self": 43.91635050900254
},
"_update_policy": {
"total": 287.603998411,
"count": 4,
"self": 21.843839609997985,
"children": {
"TorchPOCAOptimizer.update": {
"total": 265.76015880100204,
"count": 123,
"self": 265.76015880100204
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.76799994733301e-06,
"count": 1,
"self": 1.76799994733301e-06
},
"TrainerController._save_models": {
"total": 0.2994754040000771,
"count": 1,
"self": 0.0025718780000261177,
"children": {
"RLTrainer._checkpoint": {
"total": 0.296903526000051,
"count": 1,
"self": 0.296903526000051
}
}
}
}
}
}
}