{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.36977726221084595,
"min": 0.3580505847930908,
"max": 1.450472116470337,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 11146.5654296875,
"min": 10730.0595703125,
"max": 44001.5234375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989959.0,
"min": 29910.0,
"max": 989959.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989959.0,
"min": 29910.0,
"max": 989959.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.511104941368103,
"min": -0.09569025784730911,
"max": 0.511104941368103,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 139.02053833007812,
"min": -23.061351776123047,
"max": 140.27413940429688,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.0028107797261327505,
"min": -0.0028107797261327505,
"max": 0.3171139061450958,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.7645320892333984,
"min": -0.7645320892333984,
"max": 76.21006774902344,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06836471756280585,
"min": 0.06690948150935583,
"max": 0.07832613427867526,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9571060458792819,
"min": 0.5482829399507267,
"max": 1.0661902121906215,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.016924253872428442,
"min": 0.000883372145429386,
"max": 0.017694441992803366,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.23693955421399818,
"min": 0.011483837890582018,
"max": 0.2477221878992471,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.690918864964284e-06,
"min": 7.690918864964284e-06,
"max": 0.0002952347158741428,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010767286410949997,
"min": 0.00010767286410949997,
"max": 0.003608931997022699,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10256360714285714,
"min": 0.10256360714285714,
"max": 0.19841157142857144,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4358905,
"min": 1.388881,
"max": 2.5281195,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026610435357142856,
"min": 0.00026610435357142856,
"max": 0.009841315985714286,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037254609499999996,
"min": 0.0037254609499999996,
"max": 0.12030743227,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.007130883634090424,
"min": 0.007130883634090424,
"max": 0.5126622915267944,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.09983237087726593,
"min": 0.09983237087726593,
"max": 3.5886359214782715,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 374.6666666666667,
"min": 346.1025641025641,
"max": 996.96875,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 32596.0,
"min": 16709.0,
"max": 32758.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5551883494264858,
"min": -0.9352375506423414,
"max": 1.5769512663858058,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 133.74619805067778,
"min": -29.927601620554924,
"max": 133.74619805067778,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5551883494264858,
"min": -0.9352375506423414,
"max": 1.5769512663858058,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 133.74619805067778,
"min": -29.927601620554924,
"max": 133.74619805067778,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.028216918677614115,
"min": 0.028216918677614115,
"max": 10.193550705909729,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.426655006274814,
"min": 2.2230944259572425,
"max": 173.2903620004654,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1740969635",
"python_version": "3.10.12 (main, Feb 4 2025, 14:57:36) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1740972709"
},
"total": 3073.866505559,
"count": 1,
"self": 0.6370618709997871,
"children": {
"run_training.setup": {
"total": 0.051447962000565894,
"count": 1,
"self": 0.051447962000565894
},
"TrainerController.start_learning": {
"total": 3073.1779957259996,
"count": 1,
"self": 2.323609612995824,
"children": {
"TrainerController._reset_env": {
"total": 5.540949600000204,
"count": 1,
"self": 5.540949600000204
},
"TrainerController.advance": {
"total": 3065.2258622980034,
"count": 63935,
"self": 2.4867665199499243,
"children": {
"env_step": {
"total": 2038.6802913680858,
"count": 63935,
"self": 1879.7043428387487,
"children": {
"SubprocessEnvManager._take_step": {
"total": 157.55149409516343,
"count": 63935,
"self": 6.526093197179762,
"children": {
"TorchPolicy.evaluate": {
"total": 151.02540089798367,
"count": 62572,
"self": 151.02540089798367
}
}
},
"workers": {
"total": 1.42445443417364,
"count": 63935,
"self": 0.0,
"children": {
"worker_root": {
"total": 3066.2177714231775,
"count": 63935,
"is_parallel": true,
"self": 1353.0819421951646,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.007856244000322476,
"count": 1,
"is_parallel": true,
"self": 0.004865047999373928,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0029911960009485483,
"count": 8,
"is_parallel": true,
"self": 0.0029911960009485483
}
}
},
"UnityEnvironment.step": {
"total": 0.2623013610000271,
"count": 1,
"is_parallel": true,
"self": 0.0044218780003575375,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0006978009996601031,
"count": 1,
"is_parallel": true,
"self": 0.0006978009996601031
},
"communicator.exchange": {
"total": 0.24702279200027988,
"count": 1,
"is_parallel": true,
"self": 0.24702279200027988
},
"steps_from_proto": {
"total": 0.010158889999729581,
"count": 1,
"is_parallel": true,
"self": 0.0027321899997332366,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0074266999999963446,
"count": 8,
"is_parallel": true,
"self": 0.0074266999999963446
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1713.1358292280129,
"count": 63934,
"is_parallel": true,
"self": 45.48663933717489,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 30.03815249996751,
"count": 63934,
"is_parallel": true,
"self": 30.03815249996751
},
"communicator.exchange": {
"total": 1509.0095357108576,
"count": 63934,
"is_parallel": true,
"self": 1509.0095357108576
},
"steps_from_proto": {
"total": 128.60150168001292,
"count": 63934,
"is_parallel": true,
"self": 27.371362279034656,
"children": {
"_process_rank_one_or_two_observation": {
"total": 101.23013940097826,
"count": 511472,
"is_parallel": true,
"self": 101.23013940097826
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1024.0588044099677,
"count": 63935,
"self": 4.656121496868764,
"children": {
"process_trajectory": {
"total": 155.849403922095,
"count": 63935,
"self": 155.41898029609547,
"children": {
"RLTrainer._checkpoint": {
"total": 0.43042362599953776,
"count": 2,
"self": 0.43042362599953776
}
}
},
"_update_policy": {
"total": 863.5532789910039,
"count": 457,
"self": 354.14338129989756,
"children": {
"TorchPPOOptimizer.update": {
"total": 509.4098976911064,
"count": 22788,
"self": 509.4098976911064
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0430003385408781e-06,
"count": 1,
"self": 1.0430003385408781e-06
},
"TrainerController._save_models": {
"total": 0.08757317199979298,
"count": 1,
"self": 0.0019128180001644068,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08566035399962857,
"count": 1,
"self": 0.08566035399962857
}
}
}
}
}
}
}