{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.21312913298606873,
"min": 0.21312913298606873,
"max": 0.34789592027664185,
"count": 30
},
"Pyramids.Policy.Entropy.sum": {
"value": 6431.384765625,
"min": 6431.384765625,
"max": 10743.0263671875,
"count": 30
},
"Pyramids.Step.mean": {
"value": 2099875.0,
"min": 1229959.0,
"max": 2099875.0,
"count": 30
},
"Pyramids.Step.sum": {
"value": 2099875.0,
"min": 1229959.0,
"max": 2099875.0,
"count": 30
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.7284514307975769,
"min": 0.5540592074394226,
"max": 0.7820839285850525,
"count": 30
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 212.7078094482422,
"min": 154.58251953125,
"max": 235.40725708007812,
"count": 30
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.00011148242629133165,
"min": -0.036523718386888504,
"max": 0.023824898526072502,
"count": 30
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 0.032552868127822876,
"min": -10.190117835998535,
"max": 6.933045387268066,
"count": 30
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 246.27272727272728,
"min": 230.70247933884298,
"max": 348.45054945054943,
"count": 30
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29799.0,
"min": 27384.0,
"max": 31898.0,
"count": 30
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.7371950270223224,
"min": 1.5598866424626774,
"max": 1.7692975122327648,
"count": 30
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 210.200598269701,
"min": 140.38979782164097,
"max": 218.61299833655357,
"count": 30
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.7371950270223224,
"min": 1.5598866424626774,
"max": 1.7692975122327648,
"count": 30
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 210.200598269701,
"min": 140.38979782164097,
"max": 218.61299833655357,
"count": 30
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.027385835378880088,
"min": 0.026519934541434205,
"max": 0.05033405645841008,
"count": 30
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.313686080844491,
"min": 3.2089120795135386,
"max": 4.530065081256907,
"count": 30
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07038099201536654,
"min": 0.06557304179805419,
"max": 0.07255525143340318,
"count": 30
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9853338882151317,
"min": 0.9180225851727586,
"max": 1.0615770213286548,
"count": 30
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.016201011241204624,
"min": 0.012865025205568656,
"max": 0.017286618631462466,
"count": 30
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22681415737686475,
"min": 0.18011035287796118,
"max": 0.259299279471937,
"count": 30
},
"Pyramids.Policy.LearningRate.mean": {
"value": 2.2603237363877542e-06,
"min": 2.2603237363877542e-06,
"max": 0.00012631876197539797,
"count": 30
},
"Pyramids.Policy.LearningRate.sum": {
"value": 3.164453230942856e-05,
"min": 3.164453230942856e-05,
"max": 0.0017684626676555716,
"count": 30
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.1007534081632653,
"min": 0.1007534081632653,
"max": 0.1421062346938776,
"count": 30
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4105477142857143,
"min": 1.4105477142857143,
"max": 2.067913761904762,
"count": 30
},
"Pyramids.Policy.Beta.mean": {
"value": 8.526547551020405e-05,
"min": 8.526547551020405e-05,
"max": 0.004216412845918367,
"count": 30
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0011937166571428567,
"min": 0.0011937166571428567,
"max": 0.059029779842857136,
"count": 30
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.01070969644933939,
"min": 0.010628776624798775,
"max": 0.014891265891492367,
"count": 30
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1499357521533966,
"min": 0.14887894690036774,
"max": 0.20847772061824799,
"count": 30
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 30
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 30
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1767209721",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --resume",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cu128",
"numpy_version": "1.23.5",
"end_time_seconds": "1767211999"
},
"total": 2278.205391746,
"count": 1,
"self": 0.4811494629993831,
"children": {
"run_training.setup": {
"total": 0.022436406000451825,
"count": 1,
"self": 0.022436406000451825
},
"TrainerController.start_learning": {
"total": 2277.701805877,
"count": 1,
"self": 1.2841876258898992,
"children": {
"TrainerController._reset_env": {
"total": 2.0473017460008123,
"count": 1,
"self": 2.0473017460008123
},
"TrainerController.advance": {
"total": 2274.2910103411086,
"count": 58898,
"self": 1.3469239300684421,
"children": {
"env_step": {
"total": 1689.9132824588696,
"count": 58898,
"self": 1557.947543781861,
"children": {
"SubprocessEnvManager._take_step": {
"total": 131.1971453071019,
"count": 58898,
"self": 4.195159451937798,
"children": {
"TorchPolicy.evaluate": {
"total": 127.0019858551641,
"count": 56301,
"self": 127.0019858551641
}
}
},
"workers": {
"total": 0.7685933699067391,
"count": 58898,
"self": 0.0,
"children": {
"worker_root": {
"total": 2271.9018152890612,
"count": 58898,
"is_parallel": true,
"self": 822.0037834562008,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0018908930005636648,
"count": 1,
"is_parallel": true,
"self": 0.0006353810003929539,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001255512000170711,
"count": 8,
"is_parallel": true,
"self": 0.001255512000170711
}
}
},
"UnityEnvironment.step": {
"total": 0.0512661869997828,
"count": 1,
"is_parallel": true,
"self": 0.000529197998730524,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005153150004844065,
"count": 1,
"is_parallel": true,
"self": 0.0005153150004844065
},
"communicator.exchange": {
"total": 0.04860676699991018,
"count": 1,
"is_parallel": true,
"self": 0.04860676699991018
},
"steps_from_proto": {
"total": 0.0016149070006576949,
"count": 1,
"is_parallel": true,
"self": 0.00032013300005928613,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012947740005984087,
"count": 8,
"is_parallel": true,
"self": 0.0012947740005984087
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1449.8980318328604,
"count": 58897,
"is_parallel": true,
"self": 30.372874912755833,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 21.270293496020713,
"count": 58897,
"is_parallel": true,
"self": 21.270293496020713
},
"communicator.exchange": {
"total": 1300.3045197430047,
"count": 58897,
"is_parallel": true,
"self": 1300.3045197430047
},
"steps_from_proto": {
"total": 97.95034368107918,
"count": 58897,
"is_parallel": true,
"self": 20.66544297605651,
"children": {
"_process_rank_one_or_two_observation": {
"total": 77.28490070502266,
"count": 471176,
"is_parallel": true,
"self": 77.28490070502266
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 583.0308039521706,
"count": 58898,
"self": 2.6356087911763098,
"children": {
"process_trajectory": {
"total": 117.40548396100075,
"count": 58898,
"self": 117.21255378300066,
"children": {
"RLTrainer._checkpoint": {
"total": 0.19293017800009693,
"count": 2,
"self": 0.19293017800009693
}
}
},
"_update_policy": {
"total": 462.9897111999935,
"count": 426,
"self": 257.0169891231153,
"children": {
"TorchPPOOptimizer.update": {
"total": 205.9727220768782,
"count": 20493,
"self": 205.9727220768782
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.168999915535096e-06,
"count": 1,
"self": 1.168999915535096e-06
},
"TrainerController._save_models": {
"total": 0.07930499500071164,
"count": 1,
"self": 0.001485969000896148,
"children": {
"RLTrainer._checkpoint": {
"total": 0.0778190259998155,
"count": 1,
"self": 0.0778190259998155
}
}
}
}
}
}
}