{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.8658556342124939,
"min": 0.8568869233131409,
"max": 1.3608918190002441,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 26169.62109375,
"min": 25473.53515625,
"max": 41284.015625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989884.0,
"min": 29952.0,
"max": 989884.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989884.0,
"min": 29952.0,
"max": 989884.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.03867529332637787,
"min": -0.11774829030036926,
"max": 0.08777497708797455,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 9.475446701049805,
"min": -28.495086669921875,
"max": 20.802669525146484,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.021831199526786804,
"min": -0.036457180976867676,
"max": 0.44904839992523193,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -5.348643779754639,
"min": -9.004923820495605,
"max": 106.42446899414062,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0665718631436383,
"min": 0.06585706145270612,
"max": 0.07380919246817776,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9320060840109363,
"min": 0.5038065833988634,
"max": 1.0333286945544886,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.005783018441253371,
"min": 8.590104548686253e-05,
"max": 0.005783018441253371,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.0809622581775472,
"min": 0.0011167135913292129,
"max": 0.0809622581775472,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.633918883964288e-06,
"min": 7.633918883964288e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010687486437550002,
"min": 0.00010687486437550002,
"max": 0.0031440710519763998,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10254460714285717,
"min": 0.10254460714285717,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4356245000000003,
"min": 1.3691136000000002,
"max": 2.3480236000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002642062535714286,
"min": 0.0002642062535714286,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003698887550000001,
"min": 0.003698887550000001,
"max": 0.10482755764,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.008972590789198875,
"min": 0.00880782026797533,
"max": 0.38569894433021545,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.12561626732349396,
"min": 0.12330947816371918,
"max": 2.699892520904541,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 842.1351351351351,
"min": 832.7567567567568,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31159.0,
"min": 15984.0,
"max": 32651.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": -0.13980003828937942,
"min": -1.0000000521540642,
"max": 0.049949954781267375,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": -5.172601416707039,
"min": -32.000001668930054,
"max": 1.7981983721256256,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": -0.13980003828937942,
"min": -1.0000000521540642,
"max": 0.049949954781267375,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": -5.172601416707039,
"min": -32.000001668930054,
"max": 1.7981983721256256,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.07859579662200869,
"min": 0.07859579662200869,
"max": 7.631403457373381,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.9080444750143215,
"min": 2.711899575719144,
"max": 122.10245531797409,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1711927043",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1711928708"
},
"total": 1664.5762981699997,
"count": 1,
"self": 0.32260139199979676,
"children": {
"run_training.setup": {
"total": 0.05536682199999632,
"count": 1,
"self": 0.05536682199999632
},
"TrainerController.start_learning": {
"total": 1664.198329956,
"count": 1,
"self": 1.5714458640072735,
"children": {
"TrainerController._reset_env": {
"total": 2.958404883999947,
"count": 1,
"self": 2.958404883999947
},
"TrainerController.advance": {
"total": 1659.5869754989926,
"count": 63010,
"self": 1.5951992899620109,
"children": {
"env_step": {
"total": 1063.315044602011,
"count": 63010,
"self": 922.0231450651331,
"children": {
"SubprocessEnvManager._take_step": {
"total": 140.31523106992654,
"count": 63010,
"self": 4.801478914909012,
"children": {
"TorchPolicy.evaluate": {
"total": 135.51375215501753,
"count": 62559,
"self": 135.51375215501753
}
}
},
"workers": {
"total": 0.9766684669514234,
"count": 63010,
"self": 0.0,
"children": {
"worker_root": {
"total": 1661.4855698470174,
"count": 63010,
"is_parallel": true,
"self": 848.8472684300114,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0021712180000577064,
"count": 1,
"is_parallel": true,
"self": 0.0006778430001759261,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014933749998817802,
"count": 8,
"is_parallel": true,
"self": 0.0014933749998817802
}
}
},
"UnityEnvironment.step": {
"total": 0.03606010100020285,
"count": 1,
"is_parallel": true,
"self": 0.0004457350000848237,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0003705759997956193,
"count": 1,
"is_parallel": true,
"self": 0.0003705759997956193
},
"communicator.exchange": {
"total": 0.034031902000151604,
"count": 1,
"is_parallel": true,
"self": 0.034031902000151604
},
"steps_from_proto": {
"total": 0.0012118880001708021,
"count": 1,
"is_parallel": true,
"self": 0.0002852079999229318,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0009266800002478703,
"count": 8,
"is_parallel": true,
"self": 0.0009266800002478703
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 812.638301417006,
"count": 63009,
"is_parallel": true,
"self": 24.96046847192065,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 15.038778053086617,
"count": 63009,
"is_parallel": true,
"self": 15.038778053086617
},
"communicator.exchange": {
"total": 705.7792937270658,
"count": 63009,
"is_parallel": true,
"self": 705.7792937270658
},
"steps_from_proto": {
"total": 66.85976116493293,
"count": 63009,
"is_parallel": true,
"self": 14.941342634938337,
"children": {
"_process_rank_one_or_two_observation": {
"total": 51.9184185299946,
"count": 504072,
"is_parallel": true,
"self": 51.9184185299946
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 594.6767316070195,
"count": 63010,
"self": 2.846937189087839,
"children": {
"process_trajectory": {
"total": 117.57600385193018,
"count": 63010,
"self": 117.31392448593033,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2620793659998526,
"count": 2,
"self": 0.2620793659998526
}
}
},
"_update_policy": {
"total": 474.2537905660015,
"count": 431,
"self": 274.0311528389941,
"children": {
"TorchPPOOptimizer.update": {
"total": 200.2226377270074,
"count": 22842,
"self": 200.2226377270074
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.030000021273736e-06,
"count": 1,
"self": 1.030000021273736e-06
},
"TrainerController._save_models": {
"total": 0.08150267900009567,
"count": 1,
"self": 0.0013431419997687044,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08015953700032696,
"count": 1,
"self": 0.08015953700032696
}
}
}
}
}
}
}