{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.2630590498447418,
"min": 0.260686993598938,
"max": 1.4080257415771484,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 7967.5322265625,
"min": 7820.60986328125,
"max": 42713.8671875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989996.0,
"min": 29952.0,
"max": 989996.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989996.0,
"min": 29952.0,
"max": 989996.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.512125551700592,
"min": -0.10252418369054794,
"max": 0.5883321762084961,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 142.3708953857422,
"min": -24.708328247070312,
"max": 165.3213348388672,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.00012290092126931995,
"min": -0.006286347284913063,
"max": 0.2322310507297516,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.03416645526885986,
"min": -1.7098864316940308,
"max": 55.967681884765625,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06906213116391362,
"min": 0.06284252128697763,
"max": 0.07243062176099654,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9668698362947907,
"min": 0.5070143523269758,
"max": 1.072647689259611,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015590981529385315,
"min": 0.00021146499505555192,
"max": 0.015590981529385315,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2182737414113944,
"min": 0.002537579940666623,
"max": 0.2182737414113944,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.341547552850003e-06,
"min": 7.341547552850003e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010278166573990005,
"min": 0.00010278166573990005,
"max": 0.0031372418542527994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10244715000000003,
"min": 0.10244715000000003,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4342601000000004,
"min": 1.3886848,
"max": 2.4423257,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025447028500000013,
"min": 0.00025447028500000013,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035625839900000015,
"min": 0.0035625839900000015,
"max": 0.10460014527999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.010092445649206638,
"min": 0.010092445649206638,
"max": 0.38658854365348816,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.14129424095153809,
"min": 0.14129424095153809,
"max": 2.7061197757720947,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 368.8139534883721,
"min": 331.9583333333333,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31718.0,
"min": 15984.0,
"max": 33430.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5613906756909781,
"min": -1.0000000521540642,
"max": 1.6472062339695792,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 134.27959810942411,
"min": -32.000001668930054,
"max": 158.1317984610796,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5613906756909781,
"min": -1.0000000521540642,
"max": 1.6472062339695792,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 134.27959810942411,
"min": -32.000001668930054,
"max": 158.1317984610796,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03827912207736179,
"min": 0.03659147060676421,
"max": 7.76106836181134,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.292004498653114,
"min": 3.0005005897546653,
"max": 124.17709378898144,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1733017459",
"python_version": "3.10.12 (main, Nov 6 2024, 20:22:13) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1733020930"
},
"total": 3471.067061704,
"count": 1,
"self": 0.7560304379999252,
"children": {
"run_training.setup": {
"total": 0.07514669899995852,
"count": 1,
"self": 0.07514669899995852
},
"TrainerController.start_learning": {
"total": 3470.235884567,
"count": 1,
"self": 2.3538947289266616,
"children": {
"TrainerController._reset_env": {
"total": 6.392761468000003,
"count": 1,
"self": 6.392761468000003
},
"TrainerController.advance": {
"total": 3461.3944915120737,
"count": 63901,
"self": 2.44964012111177,
"children": {
"env_step": {
"total": 2349.9380653939543,
"count": 63901,
"self": 2182.722877427921,
"children": {
"SubprocessEnvManager._take_step": {
"total": 165.82084231200633,
"count": 63901,
"self": 7.0678217170002995,
"children": {
"TorchPolicy.evaluate": {
"total": 158.75302059500603,
"count": 62554,
"self": 158.75302059500603
}
}
},
"workers": {
"total": 1.3943456540268357,
"count": 63901,
"self": 0.0,
"children": {
"worker_root": {
"total": 3462.597453573025,
"count": 63901,
"is_parallel": true,
"self": 1459.3293124060133,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0024202070000001186,
"count": 1,
"is_parallel": true,
"self": 0.0007652370000528208,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016549699999472978,
"count": 8,
"is_parallel": true,
"self": 0.0016549699999472978
}
}
},
"UnityEnvironment.step": {
"total": 0.06004624399997738,
"count": 1,
"is_parallel": true,
"self": 0.0007644180000170309,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.000507192999975814,
"count": 1,
"is_parallel": true,
"self": 0.000507192999975814
},
"communicator.exchange": {
"total": 0.05679142999997566,
"count": 1,
"is_parallel": true,
"self": 0.05679142999997566
},
"steps_from_proto": {
"total": 0.0019832030000088707,
"count": 1,
"is_parallel": true,
"self": 0.00041981600020335463,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001563386999805516,
"count": 8,
"is_parallel": true,
"self": 0.001563386999805516
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 2003.268141167012,
"count": 63900,
"is_parallel": true,
"self": 50.54075389499576,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 31.119446010002378,
"count": 63900,
"is_parallel": true,
"self": 31.119446010002378
},
"communicator.exchange": {
"total": 1790.2953599210011,
"count": 63900,
"is_parallel": true,
"self": 1790.2953599210011
},
"steps_from_proto": {
"total": 131.31258134101273,
"count": 63900,
"is_parallel": true,
"self": 28.295986214030847,
"children": {
"_process_rank_one_or_two_observation": {
"total": 103.01659512698188,
"count": 511200,
"is_parallel": true,
"self": 103.01659512698188
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1109.0067859970077,
"count": 63901,
"self": 4.271272413020824,
"children": {
"process_trajectory": {
"total": 172.4996754099912,
"count": 63901,
"self": 172.14421187199088,
"children": {
"RLTrainer._checkpoint": {
"total": 0.35546353800032193,
"count": 2,
"self": 0.35546353800032193
}
}
},
"_update_policy": {
"total": 932.2358381739957,
"count": 444,
"self": 374.8597494629886,
"children": {
"TorchPPOOptimizer.update": {
"total": 557.3760887110071,
"count": 22812,
"self": 557.3760887110071
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1099996299890336e-06,
"count": 1,
"self": 1.1099996299890336e-06
},
"TrainerController._save_models": {
"total": 0.09473574799994822,
"count": 1,
"self": 0.002034397999977955,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09270134999997026,
"count": 1,
"self": 0.09270134999997026
}
}
}
}
}
}
}