{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.3623613119125366,
"min": 0.3623613119125366,
"max": 1.4597163200378418,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10841.8505859375,
"min": 10841.8505859375,
"max": 44281.953125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989879.0,
"min": 29952.0,
"max": 989879.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989879.0,
"min": 29952.0,
"max": 989879.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.7121767997741699,
"min": -0.07842103391885757,
"max": 0.7330880761146545,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 207.95562744140625,
"min": -18.821048736572266,
"max": 214.8376922607422,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.022392867133021355,
"min": 0.017170555889606476,
"max": 0.457565575838089,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 6.538717269897461,
"min": 4.687561988830566,
"max": 109.81573486328125,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06928242601117947,
"min": 0.06547740072225376,
"max": 0.07207416939837415,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.039236390167692,
"min": 0.4827421773810491,
"max": 1.056197498740706,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015814841035494787,
"min": 0.001528092591167077,
"max": 0.017331028425423962,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2372226155324218,
"min": 0.012266789816608094,
"max": 0.24263439795593547,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.510717496460002e-06,
"min": 7.510717496460002e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011266076244690003,
"min": 0.00011266076244690003,
"max": 0.0036334594888468995,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10250354000000002,
"min": 0.10250354000000002,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5375531000000002,
"min": 1.3691136000000002,
"max": 2.6175173000000003,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026010364600000005,
"min": 0.00026010364600000005,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003901554690000001,
"min": 0.003901554690000001,
"max": 0.12113419469000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.014273879118263721,
"min": 0.014273879118263721,
"max": 0.6075441241264343,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.2141081839799881,
"min": 0.20748208463191986,
"max": 4.252809047698975,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 272.10280373831773,
"min": 249.5655737704918,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29115.0,
"min": 15984.0,
"max": 33352.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.709196248970856,
"min": -1.0000000521540642,
"max": 1.7516991708094511,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 182.8839986398816,
"min": -32.000001668930054,
"max": 215.4589980095625,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.709196248970856,
"min": -1.0000000521540642,
"max": 1.7516991708094511,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 182.8839986398816,
"min": -32.000001668930054,
"max": 215.4589980095625,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.040126602303576175,
"min": 0.03802241957890782,
"max": 12.24424483627081,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.293546446482651,
"min": 4.293546446482651,
"max": 195.90791738033295,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1748956599",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.7.0+cu126",
"numpy_version": "1.23.5",
"end_time_seconds": "1748958937"
},
"total": 2337.8721931289997,
"count": 1,
"self": 0.47583602499935296,
"children": {
"run_training.setup": {
"total": 0.02806961499982208,
"count": 1,
"self": 0.02806961499982208
},
"TrainerController.start_learning": {
"total": 2337.3682874890005,
"count": 1,
"self": 1.3382924150118924,
"children": {
"TrainerController._reset_env": {
"total": 2.625964748000115,
"count": 1,
"self": 2.625964748000115
},
"TrainerController.advance": {
"total": 2333.324025422988,
"count": 64446,
"self": 1.4095005940198462,
"children": {
"env_step": {
"total": 1662.5593968820185,
"count": 64446,
"self": 1515.2147191770405,
"children": {
"SubprocessEnvManager._take_step": {
"total": 146.5989373608113,
"count": 64446,
"self": 4.5510353537911215,
"children": {
"TorchPolicy.evaluate": {
"total": 142.04790200702018,
"count": 62562,
"self": 142.04790200702018
}
}
},
"workers": {
"total": 0.7457403441667338,
"count": 64446,
"self": 0.0,
"children": {
"worker_root": {
"total": 2332.5359417021036,
"count": 64446,
"is_parallel": true,
"self": 927.9723316081286,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020017169999846374,
"count": 1,
"is_parallel": true,
"self": 0.0006582520004485559,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013434649995360815,
"count": 8,
"is_parallel": true,
"self": 0.0013434649995360815
}
}
},
"UnityEnvironment.step": {
"total": 0.047814576999826386,
"count": 1,
"is_parallel": true,
"self": 0.0005682400001205679,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004454729996723472,
"count": 1,
"is_parallel": true,
"self": 0.0004454729996723472
},
"communicator.exchange": {
"total": 0.04523406999987856,
"count": 1,
"is_parallel": true,
"self": 0.04523406999987856
},
"steps_from_proto": {
"total": 0.001566794000154914,
"count": 1,
"is_parallel": true,
"self": 0.0003541410005709622,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001212652999583952,
"count": 8,
"is_parallel": true,
"self": 0.001212652999583952
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1404.563610093975,
"count": 64445,
"is_parallel": true,
"self": 31.3379355910065,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.65854317603589,
"count": 64445,
"is_parallel": true,
"self": 22.65854317603589
},
"communicator.exchange": {
"total": 1257.5746788039805,
"count": 64445,
"is_parallel": true,
"self": 1257.5746788039805
},
"steps_from_proto": {
"total": 92.99245252295214,
"count": 64445,
"is_parallel": true,
"self": 18.465762725118566,
"children": {
"_process_rank_one_or_two_observation": {
"total": 74.52668979783357,
"count": 515560,
"is_parallel": true,
"self": 74.52668979783357
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 669.3551279469498,
"count": 64446,
"self": 2.547122582981501,
"children": {
"process_trajectory": {
"total": 128.92563509397587,
"count": 64446,
"self": 128.68909866197646,
"children": {
"RLTrainer._checkpoint": {
"total": 0.23653643199941143,
"count": 2,
"self": 0.23653643199941143
}
}
},
"_update_policy": {
"total": 537.8823702699924,
"count": 451,
"self": 298.68874422200724,
"children": {
"TorchPPOOptimizer.update": {
"total": 239.19362604798516,
"count": 22797,
"self": 239.19362604798516
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.10000380827114e-07,
"count": 1,
"self": 9.10000380827114e-07
},
"TrainerController._save_models": {
"total": 0.08000399299999117,
"count": 1,
"self": 0.0011264970007687225,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07887749599922245,
"count": 1,
"self": 0.07887749599922245
}
}
}
}
}
}
}