{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.642172634601593,
"min": 0.642172634601593,
"max": 1.4805656671524048,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 19131.607421875,
"min": 19131.607421875,
"max": 44914.44140625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989908.0,
"min": 29952.0,
"max": 989908.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989908.0,
"min": 29952.0,
"max": 989908.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.4079838991165161,
"min": -0.26397669315338135,
"max": 0.4315630793571472,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 107.70774841308594,
"min": -62.562477111816406,
"max": 116.95359802246094,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.002595194848254323,
"min": -0.2178112417459488,
"max": 0.18550418317317963,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.6851314306259155,
"min": -57.28435516357422,
"max": 44.52100372314453,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0707030601063577,
"min": 0.06505951898829412,
"max": 0.07324577131629187,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9898428414890077,
"min": 0.5127203992140431,
"max": 1.0768232921174927,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.013963222901067987,
"min": 0.0002533068754298385,
"max": 0.03407538086117901,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.19548512061495182,
"min": 0.003039682505158062,
"max": 0.5111307129176852,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.654940305528572e-06,
"min": 7.654940305528572e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010716916427740001,
"min": 0.00010716916427740001,
"max": 0.003508013330662299,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10255161428571428,
"min": 0.10255161428571428,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4357226,
"min": 1.3886848,
"max": 2.5693376999999997,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026490626714285723,
"min": 0.00026490626714285723,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037086877400000014,
"min": 0.0037086877400000014,
"max": 0.11695683622999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.007136494852602482,
"min": 0.007136494852602482,
"max": 0.28169915080070496,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.0999109297990799,
"min": 0.0999109297990799,
"max": 1.9718941450119019,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 488.9852941176471,
"min": 410.81081081081084,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 33251.0,
"min": 15984.0,
"max": 33424.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.3344882145962293,
"min": -1.0000000521540642,
"max": 1.4545117438058643,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 90.7451985925436,
"min": -30.999201618134975,
"max": 107.35939835757017,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.3344882145962293,
"min": -1.0000000521540642,
"max": 1.4545117438058643,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 90.7451985925436,
"min": -30.999201618134975,
"max": 107.35939835757017,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.036259704605874826,
"min": 0.032090318124877136,
"max": 5.475978604517877,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.465659913199488,
"min": 2.1752223440125817,
"max": 87.61565767228603,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1759135832",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cu128",
"numpy_version": "1.23.5",
"end_time_seconds": "1759137895"
},
"total": 2062.399851555,
"count": 1,
"self": 0.4303069309999046,
"children": {
"run_training.setup": {
"total": 0.032243344999869805,
"count": 1,
"self": 0.032243344999869805
},
"TrainerController.start_learning": {
"total": 2061.937301279,
"count": 1,
"self": 1.2171247989763287,
"children": {
"TrainerController._reset_env": {
"total": 2.2456406619999143,
"count": 1,
"self": 2.2456406619999143
},
"TrainerController.advance": {
"total": 2058.3983204150236,
"count": 63548,
"self": 1.2417737879759443,
"children": {
"env_step": {
"total": 1422.2167690900278,
"count": 63548,
"self": 1283.0462695279657,
"children": {
"SubprocessEnvManager._take_step": {
"total": 138.43949832503176,
"count": 63548,
"self": 4.291974304977657,
"children": {
"TorchPolicy.evaluate": {
"total": 134.1475240200541,
"count": 62552,
"self": 134.1475240200541
}
}
},
"workers": {
"total": 0.7310012370303411,
"count": 63548,
"self": 0.0,
"children": {
"worker_root": {
"total": 2055.3477027509357,
"count": 63548,
"is_parallel": true,
"self": 880.0893714509082,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017863380001017504,
"count": 1,
"is_parallel": true,
"self": 0.000633719999996174,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011526180001055764,
"count": 8,
"is_parallel": true,
"self": 0.0011526180001055764
}
}
},
"UnityEnvironment.step": {
"total": 0.048270240000192643,
"count": 1,
"is_parallel": true,
"self": 0.0005147970002781221,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004455739999684738,
"count": 1,
"is_parallel": true,
"self": 0.0004455739999684738
},
"communicator.exchange": {
"total": 0.04571785500002079,
"count": 1,
"is_parallel": true,
"self": 0.04571785500002079
},
"steps_from_proto": {
"total": 0.001592013999925257,
"count": 1,
"is_parallel": true,
"self": 0.00032465599997522077,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012673579999500362,
"count": 8,
"is_parallel": true,
"self": 0.0012673579999500362
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1175.2583313000275,
"count": 63547,
"is_parallel": true,
"self": 32.09587272102681,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 21.870403530016574,
"count": 63547,
"is_parallel": true,
"self": 21.870403530016574
},
"communicator.exchange": {
"total": 1021.2488769260347,
"count": 63547,
"is_parallel": true,
"self": 1021.2488769260347
},
"steps_from_proto": {
"total": 100.04317812294948,
"count": 63547,
"is_parallel": true,
"self": 20.471825397034536,
"children": {
"_process_rank_one_or_two_observation": {
"total": 79.57135272591495,
"count": 508376,
"is_parallel": true,
"self": 79.57135272591495
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 634.9397775370198,
"count": 63548,
"self": 2.4064408829867716,
"children": {
"process_trajectory": {
"total": 118.0116583780316,
"count": 63548,
"self": 117.83537837103154,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1762800070000594,
"count": 2,
"self": 0.1762800070000594
}
}
},
"_update_policy": {
"total": 514.5216782760015,
"count": 446,
"self": 285.92432495798926,
"children": {
"TorchPPOOptimizer.update": {
"total": 228.5973533180122,
"count": 22770,
"self": 228.5973533180122
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.350001164420974e-07,
"count": 1,
"self": 9.350001164420974e-07
},
"TrainerController._save_models": {
"total": 0.07621446800021658,
"count": 1,
"self": 0.0011342440002408694,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07508022399997571,
"count": 1,
"self": 0.07508022399997571
}
}
}
}
}
}
}