{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.7978783249855042,
"min": 0.7978783249855042,
"max": 1.5030097961425781,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 24012.9453125,
"min": 24012.9453125,
"max": 45595.3046875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989925.0,
"min": 29952.0,
"max": 989925.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989925.0,
"min": 29952.0,
"max": 989925.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.08442693948745728,
"min": -0.11093426495790482,
"max": 0.13142763078212738,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 21.022308349609375,
"min": -26.624223709106445,
"max": 31.148347854614258,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0022148864809423685,
"min": 0.0018094871193170547,
"max": 0.11095685511827469,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 0.551506757736206,
"min": 0.43608638644218445,
"max": 26.29677391052246,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06769266302165175,
"min": 0.06363532833946758,
"max": 0.07290189071520589,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9476972823031246,
"min": 0.48211918071871784,
"max": 1.0206264700128824,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.006273652748859605,
"min": 9.22956137526681e-05,
"max": 0.007606271885880892,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.08783113848403447,
"min": 0.0012921385925373534,
"max": 0.08783113848403447,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.433711807842856e-06,
"min": 7.433711807842856e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010407196530979999,
"min": 0.00010407196530979999,
"max": 0.003224579525140199,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10247787142857144,
"min": 0.10247787142857144,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4346902000000001,
"min": 1.3691136000000002,
"max": 2.3593977999999995,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025753935571428576,
"min": 0.00025753935571428576,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036055509800000003,
"min": 0.0036055509800000003,
"max": 0.10749849401999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012230935506522655,
"min": 0.012230935506522655,
"max": 0.4239945113658905,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.17123310267925262,
"min": 0.17123310267925262,
"max": 2.967961549758911,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 807.6,
"min": 807.6,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 28266.0,
"min": 15984.0,
"max": 32532.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.2777999643768583,
"min": -1.0000000521540642,
"max": 0.2777999643768583,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 9.72299875319004,
"min": -32.000001668930054,
"max": 9.72299875319004,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.2777999643768583,
"min": -1.0000000521540642,
"max": 0.2777999643768583,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 9.72299875319004,
"min": -32.000001668930054,
"max": 9.72299875319004,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.10359016883386565,
"min": 0.10359016883386565,
"max": 8.987659143283963,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.625655909185298,
"min": 3.625655909185298,
"max": 143.8025462925434,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1740855939",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1740858119"
},
"total": 2180.269202967,
"count": 1,
"self": 0.4880753930001447,
"children": {
"run_training.setup": {
"total": 0.020924384999943868,
"count": 1,
"self": 0.020924384999943868
},
"TrainerController.start_learning": {
"total": 2179.760203189,
"count": 1,
"self": 1.4958217150328892,
"children": {
"TrainerController._reset_env": {
"total": 2.12679454299996,
"count": 1,
"self": 2.12679454299996
},
"TrainerController.advance": {
"total": 2176.0466050619675,
"count": 63194,
"self": 1.5953910110797551,
"children": {
"env_step": {
"total": 1494.3086352009914,
"count": 63194,
"self": 1330.4030968470252,
"children": {
"SubprocessEnvManager._take_step": {
"total": 163.04657656995778,
"count": 63194,
"self": 5.000335806107159,
"children": {
"TorchPolicy.evaluate": {
"total": 158.04624076385062,
"count": 62565,
"self": 158.04624076385062
}
}
},
"workers": {
"total": 0.8589617840084429,
"count": 63194,
"self": 0.0,
"children": {
"worker_root": {
"total": 2174.322043326962,
"count": 63194,
"is_parallel": true,
"self": 961.8532529219756,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020378310000523925,
"count": 1,
"is_parallel": true,
"self": 0.0006540059998769721,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013838250001754204,
"count": 8,
"is_parallel": true,
"self": 0.0013838250001754204
}
}
},
"UnityEnvironment.step": {
"total": 0.04738501399970119,
"count": 1,
"is_parallel": true,
"self": 0.0005109519993311551,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004492939997362555,
"count": 1,
"is_parallel": true,
"self": 0.0004492939997362555
},
"communicator.exchange": {
"total": 0.044696008000300935,
"count": 1,
"is_parallel": true,
"self": 0.044696008000300935
},
"steps_from_proto": {
"total": 0.0017287600003328407,
"count": 1,
"is_parallel": true,
"self": 0.0003495570003906323,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013792029999422084,
"count": 8,
"is_parallel": true,
"self": 0.0013792029999422084
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1212.4687904049865,
"count": 63193,
"is_parallel": true,
"self": 32.32208608105384,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.588267821843147,
"count": 63193,
"is_parallel": true,
"self": 23.588267821843147
},
"communicator.exchange": {
"total": 1057.015495350082,
"count": 63193,
"is_parallel": true,
"self": 1057.015495350082
},
"steps_from_proto": {
"total": 99.54294115200764,
"count": 63193,
"is_parallel": true,
"self": 20.61858255512834,
"children": {
"_process_rank_one_or_two_observation": {
"total": 78.9243585968793,
"count": 505544,
"is_parallel": true,
"self": 78.9243585968793
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 680.1425788498964,
"count": 63194,
"self": 2.6798844259806174,
"children": {
"process_trajectory": {
"total": 127.74681467290975,
"count": 63194,
"self": 127.53814221090943,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2086724620003224,
"count": 2,
"self": 0.2086724620003224
}
}
},
"_update_policy": {
"total": 549.715879751006,
"count": 433,
"self": 304.2269799759597,
"children": {
"TorchPPOOptimizer.update": {
"total": 245.48889977504632,
"count": 22851,
"self": 245.48889977504632
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.049999789567664e-07,
"count": 1,
"self": 9.049999789567664e-07
},
"TrainerController._save_models": {
"total": 0.09098096399975475,
"count": 1,
"self": 0.0012814729998353869,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08969949099991936,
"count": 1,
"self": 0.08969949099991936
}
}
}
}
}
}
}