{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.3553783595561981,
"min": 0.346317857503891,
"max": 1.435851812362671,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10695.466796875,
"min": 10361.830078125,
"max": 43558.0,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989926.0,
"min": 29952.0,
"max": 989926.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989926.0,
"min": 29952.0,
"max": 989926.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.43260225653648376,
"min": -0.12665030360221863,
"max": 0.45175886154174805,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 115.93740844726562,
"min": -30.396072387695312,
"max": 121.97489166259766,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.005802266299724579,
"min": -0.018867891281843185,
"max": 0.5685024857521057,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 1.5550073385238647,
"min": -4.9245195388793945,
"max": 134.73509216308594,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06774500681268465,
"min": 0.06405509753229015,
"max": 0.07140618503104808,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0161751021902699,
"min": 0.4845990476948838,
"max": 1.0191539609513711,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01585449717790147,
"min": 0.0004049673634519863,
"max": 0.020843925277719305,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.23781745766852205,
"min": 0.005669543088327808,
"max": 0.23781745766852205,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.461177512973335e-06,
"min": 7.461177512973335e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011191766269460003,
"min": 0.00011191766269460003,
"max": 0.0032593769135410987,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10248702666666666,
"min": 0.10248702666666666,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5373054,
"min": 1.3886848,
"max": 2.4012597999999996,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002584539640000001,
"min": 0.0002584539640000001,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003876809460000002,
"min": 0.003876809460000002,
"max": 0.10866724410999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.015416407026350498,
"min": 0.015260426327586174,
"max": 0.7827287912368774,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.23124609887599945,
"min": 0.21364596486091614,
"max": 5.479101657867432,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 420.97222222222223,
"min": 376.94666666666666,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30310.0,
"min": 15984.0,
"max": 33183.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.440086082658834,
"min": -1.0000000521540642,
"max": 1.4896959795554479,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 103.68619795143604,
"min": -31.99760165810585,
"max": 111.72719846665859,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.440086082658834,
"min": -1.0000000521540642,
"max": 1.4896959795554479,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 103.68619795143604,
"min": -31.99760165810585,
"max": 111.72719846665859,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.06699393497122703,
"min": 0.06059273347103347,
"max": 17.562319481745362,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.823563317928347,
"min": 4.5444550103275105,
"max": 280.9971117079258,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1739108719",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1739110904"
},
"total": 2184.5702345870004,
"count": 1,
"self": 0.47835737700006575,
"children": {
"run_training.setup": {
"total": 0.020504545000221697,
"count": 1,
"self": 0.020504545000221697
},
"TrainerController.start_learning": {
"total": 2184.071372665,
"count": 1,
"self": 1.4008238769870331,
"children": {
"TrainerController._reset_env": {
"total": 2.2087103169997135,
"count": 1,
"self": 2.2087103169997135
},
"TrainerController.advance": {
"total": 2180.3783422570136,
"count": 63557,
"self": 1.3969407978966046,
"children": {
"env_step": {
"total": 1491.965987681016,
"count": 63557,
"self": 1336.9593200831728,
"children": {
"SubprocessEnvManager._take_step": {
"total": 154.17217848386326,
"count": 63557,
"self": 4.625525315905634,
"children": {
"TorchPolicy.evaluate": {
"total": 149.54665316795763,
"count": 62556,
"self": 149.54665316795763
}
}
},
"workers": {
"total": 0.8344891139799984,
"count": 63557,
"self": 0.0,
"children": {
"worker_root": {
"total": 2179.0516729129495,
"count": 63557,
"is_parallel": true,
"self": 954.8816518348685,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.00202972100032639,
"count": 1,
"is_parallel": true,
"self": 0.0006579059991054237,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013718150012209662,
"count": 8,
"is_parallel": true,
"self": 0.0013718150012209662
}
}
},
"UnityEnvironment.step": {
"total": 0.047824443000081374,
"count": 1,
"is_parallel": true,
"self": 0.0005000560004191357,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004484719997890352,
"count": 1,
"is_parallel": true,
"self": 0.0004484719997890352
},
"communicator.exchange": {
"total": 0.04532555700006924,
"count": 1,
"is_parallel": true,
"self": 0.04532555700006924
},
"steps_from_proto": {
"total": 0.001550357999803964,
"count": 1,
"is_parallel": true,
"self": 0.0003488219999780995,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012015359998258646,
"count": 8,
"is_parallel": true,
"self": 0.0012015359998258646
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1224.170021078081,
"count": 63556,
"is_parallel": true,
"self": 30.906059576126154,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 21.977312766067826,
"count": 63556,
"is_parallel": true,
"self": 21.977312766067826
},
"communicator.exchange": {
"total": 1078.1902389279762,
"count": 63556,
"is_parallel": true,
"self": 1078.1902389279762
},
"steps_from_proto": {
"total": 93.09640980791073,
"count": 63556,
"is_parallel": true,
"self": 18.53123562672681,
"children": {
"_process_rank_one_or_two_observation": {
"total": 74.56517418118392,
"count": 508448,
"is_parallel": true,
"self": 74.56517418118392
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 687.015413778101,
"count": 63557,
"self": 2.6010595190668937,
"children": {
"process_trajectory": {
"total": 127.34816791603635,
"count": 63557,
"self": 127.1437272340363,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2044406820000404,
"count": 2,
"self": 0.2044406820000404
}
}
},
"_update_policy": {
"total": 557.0661863429978,
"count": 442,
"self": 305.59626562004996,
"children": {
"TorchPPOOptimizer.update": {
"total": 251.4699207229478,
"count": 22812,
"self": 251.4699207229478
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.39999302115757e-07,
"count": 1,
"self": 8.39999302115757e-07
},
"TrainerController._save_models": {
"total": 0.08349537400044937,
"count": 1,
"self": 0.0013982339996800874,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08209714000076929,
"count": 1,
"self": 0.08209714000076929
}
}
}
}
}
}
}