{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4144514203071594,
"min": 0.3985368609428406,
"max": 1.4684513807296753,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12506.486328125,
"min": 11688.2890625,
"max": 44546.94140625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989990.0,
"min": 29953.0,
"max": 989990.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989990.0,
"min": 29953.0,
"max": 989990.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.6314187049865723,
"min": -0.11818081140518188,
"max": 0.6545875072479248,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 178.69149780273438,
"min": -28.48157501220703,
"max": 187.86660766601562,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.016399478539824486,
"min": 0.008780745789408684,
"max": 0.45667171478271484,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 4.64105224609375,
"min": 2.4322664737701416,
"max": 108.6878662109375,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06818691839302023,
"min": 0.06371709582004052,
"max": 0.07284691916820342,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9546168575022831,
"min": 0.4985615048922489,
"max": 1.0601195945346262,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.012970937485313428,
"min": 0.0014985303531402582,
"max": 0.015495542700810707,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.181593124794388,
"min": 0.0179823642376831,
"max": 0.2169375978113499,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.63310459852143e-06,
"min": 7.63310459852143e-06,
"max": 0.00029523835873007144,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010686346437930001,
"min": 0.00010686346437930001,
"max": 0.0035085032304989993,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10254433571428571,
"min": 0.10254433571428571,
"max": 0.1984127857142857,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4356207,
"min": 1.3888894999999999,
"max": 2.5725670000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002641791378571429,
"min": 0.0002641791378571429,
"max": 0.009841437292857141,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036985079300000004,
"min": 0.0036985079300000004,
"max": 0.11697314990000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.016564244404435158,
"min": 0.016564244404435158,
"max": 0.49161505699157715,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.2318994253873825,
"min": 0.2318994253873825,
"max": 3.44130539894104,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 316.88775510204084,
"min": 285.92929292929296,
"max": 997.53125,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31055.0,
"min": 16624.0,
"max": 33326.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6002865775497919,
"min": -0.935975051485002,
"max": 1.693846135830077,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 155.2277980223298,
"min": -29.951201647520065,
"max": 176.159998126328,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6002865775497919,
"min": -0.935975051485002,
"max": 1.693846135830077,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 155.2277980223298,
"min": -29.951201647520065,
"max": 176.159998126328,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.05402049765254241,
"min": 0.050719526417546706,
"max": 8.996790203758898,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 5.239988272296614,
"min": 5.239988272296614,
"max": 152.94543346390128,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1735212323",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/root/miniconda3/envs/drl/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.1",
"numpy_version": "1.23.5",
"end_time_seconds": "1735214004"
},
"total": 1680.670391683001,
"count": 1,
"self": 0.4254881340002612,
"children": {
"run_training.setup": {
"total": 0.019970421000834904,
"count": 1,
"self": 0.019970421000834904
},
"TrainerController.start_learning": {
"total": 1680.224933128,
"count": 1,
"self": 1.5054377173928515,
"children": {
"TrainerController._reset_env": {
"total": 2.779871639000703,
"count": 1,
"self": 2.779871639000703
},
"TrainerController.advance": {
"total": 1675.883274325608,
"count": 64064,
"self": 1.381713809663779,
"children": {
"env_step": {
"total": 1127.1912900658826,
"count": 64064,
"self": 1027.8975822431948,
"children": {
"SubprocessEnvManager._take_step": {
"total": 98.3637272759297,
"count": 64064,
"self": 4.188780534972466,
"children": {
"TorchPolicy.evaluate": {
"total": 94.17494674095724,
"count": 62560,
"self": 94.17494674095724
}
}
},
"workers": {
"total": 0.9299805467580882,
"count": 64064,
"self": 0.0,
"children": {
"worker_root": {
"total": 1677.2472033449922,
"count": 64064,
"is_parallel": true,
"self": 746.3455787602197,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001806935000786325,
"count": 1,
"is_parallel": true,
"self": 0.0006566000010934658,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011503349996928591,
"count": 8,
"is_parallel": true,
"self": 0.0011503349996928591
}
}
},
"UnityEnvironment.step": {
"total": 0.032826308999574394,
"count": 1,
"is_parallel": true,
"self": 0.0002168909977626754,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.000186282000868232,
"count": 1,
"is_parallel": true,
"self": 0.000186282000868232
},
"communicator.exchange": {
"total": 0.03172265899956983,
"count": 1,
"is_parallel": true,
"self": 0.03172265899956983
},
"steps_from_proto": {
"total": 0.0007004770013736561,
"count": 1,
"is_parallel": true,
"self": 0.0001693459998932667,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0005311310014803894,
"count": 8,
"is_parallel": true,
"self": 0.0005311310014803894
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 930.9016245847724,
"count": 64063,
"is_parallel": true,
"self": 17.117365205596798,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 15.384941251100827,
"count": 64063,
"is_parallel": true,
"self": 15.384941251100827
},
"communicator.exchange": {
"total": 850.457982580896,
"count": 64063,
"is_parallel": true,
"self": 850.457982580896
},
"steps_from_proto": {
"total": 47.941335547178824,
"count": 64063,
"is_parallel": true,
"self": 11.881226216675714,
"children": {
"_process_rank_one_or_two_observation": {
"total": 36.06010933050311,
"count": 512504,
"is_parallel": true,
"self": 36.06010933050311
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 547.3102704500616,
"count": 64064,
"self": 2.86327053792229,
"children": {
"process_trajectory": {
"total": 89.21897123414237,
"count": 64064,
"self": 89.01653165714379,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20243957699858584,
"count": 2,
"self": 0.20243957699858584
}
}
},
"_update_policy": {
"total": 455.2280286779969,
"count": 454,
"self": 213.1935840078113,
"children": {
"TorchPPOOptimizer.update": {
"total": 242.03444467018562,
"count": 22773,
"self": 242.03444467018562
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0449984984006733e-06,
"count": 1,
"self": 1.0449984984006733e-06
},
"TrainerController._save_models": {
"total": 0.05634840099992289,
"count": 1,
"self": 0.0026531379990046844,
"children": {
"RLTrainer._checkpoint": {
"total": 0.0536952630009182,
"count": 1,
"self": 0.0536952630009182
}
}
}
}
}
}
}