{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5869534611701965,
"min": 0.5869534611701965,
"max": 1.382928490638733,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 17749.47265625,
"min": 17731.7890625,
"max": 41952.51953125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989982.0,
"min": 29952.0,
"max": 989982.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989982.0,
"min": 29952.0,
"max": 989982.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.26657140254974365,
"min": -0.09772180765867233,
"max": 0.2731958031654358,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 69.57513427734375,
"min": -23.45323371887207,
"max": 69.66493225097656,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.10352865606546402,
"min": -0.056626126170158386,
"max": 0.3655428886413574,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 27.020978927612305,
"min": -14.439661979675293,
"max": 86.6336669921875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0682973407393733,
"min": 0.06438571505880877,
"max": 0.07480637028548247,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9561627703512261,
"min": 0.46316986805097005,
"max": 1.0533935562228125,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.0211634631214569,
"min": 4.81687363239384e-05,
"max": 0.0211634631214569,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2962884837003966,
"min": 0.0006261935722111993,
"max": 0.2962884837003966,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.429576094935716e-06,
"min": 7.429576094935716e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010401406532910002,
"min": 0.00010401406532910002,
"max": 0.0033827846724052003,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10247649285714287,
"min": 0.10247649285714287,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4346709000000002,
"min": 1.3691136000000002,
"max": 2.5275948000000006,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002574016364285715,
"min": 0.0002574016364285715,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003603622910000001,
"min": 0.003603622910000001,
"max": 0.11278672052,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009306291118264198,
"min": 0.009241444058716297,
"max": 0.4796140491962433,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13028807938098907,
"min": 0.1293802112340927,
"max": 3.3572983741760254,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 522.3333333333334,
"min": 522.3333333333334,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29773.0,
"min": 15984.0,
"max": 34308.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.1617648953147102,
"min": -1.0000000521540642,
"max": 1.1617648953147102,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 66.22059903293848,
"min": -32.000001668930054,
"max": 66.22059903293848,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.1617648953147102,
"min": -1.0000000521540642,
"max": 1.1617648953147102,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 66.22059903293848,
"min": -32.000001668930054,
"max": 66.22059903293848,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.05056268563534934,
"min": 0.05056268563534934,
"max": 9.58631225116551,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.8820730812149122,
"min": 2.8820730812149122,
"max": 153.38099601864815,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1751280620",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.7.1+cu126",
"numpy_version": "1.23.5",
"end_time_seconds": "1751282816"
},
"total": 2195.576563118,
"count": 1,
"self": 0.5288320109998494,
"children": {
"run_training.setup": {
"total": 0.01944851899997957,
"count": 1,
"self": 0.01944851899997957
},
"TrainerController.start_learning": {
"total": 2195.0282825880004,
"count": 1,
"self": 1.5870483649919152,
"children": {
"TrainerController._reset_env": {
"total": 2.175566789000186,
"count": 1,
"self": 2.175566789000186
},
"TrainerController.advance": {
"total": 2191.1760364130087,
"count": 63348,
"self": 1.7054617419335045,
"children": {
"env_step": {
"total": 1528.4774495610184,
"count": 63348,
"self": 1359.3763584049861,
"children": {
"SubprocessEnvManager._take_step": {
"total": 168.09879207100607,
"count": 63348,
"self": 4.979234578034038,
"children": {
"TorchPolicy.evaluate": {
"total": 163.11955749297204,
"count": 62558,
"self": 163.11955749297204
}
}
},
"workers": {
"total": 1.0022990850261522,
"count": 63348,
"self": 0.0,
"children": {
"worker_root": {
"total": 2189.1492704051,
"count": 63348,
"is_parallel": true,
"self": 954.4946783750713,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017755220001163252,
"count": 1,
"is_parallel": true,
"self": 0.0005649670004004292,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001210554999715896,
"count": 8,
"is_parallel": true,
"self": 0.001210554999715896
}
}
},
"UnityEnvironment.step": {
"total": 0.048064225999951304,
"count": 1,
"is_parallel": true,
"self": 0.0005286070002057386,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004211209998175036,
"count": 1,
"is_parallel": true,
"self": 0.0004211209998175036
},
"communicator.exchange": {
"total": 0.04531214300004649,
"count": 1,
"is_parallel": true,
"self": 0.04531214300004649
},
"steps_from_proto": {
"total": 0.0018023549998815724,
"count": 1,
"is_parallel": true,
"self": 0.0003628080000908085,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001439546999790764,
"count": 8,
"is_parallel": true,
"self": 0.001439546999790764
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1234.6545920300289,
"count": 63347,
"is_parallel": true,
"self": 33.66258004015049,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 24.020878633020857,
"count": 63347,
"is_parallel": true,
"self": 24.020878633020857
},
"communicator.exchange": {
"total": 1074.6173034189362,
"count": 63347,
"is_parallel": true,
"self": 1074.6173034189362
},
"steps_from_proto": {
"total": 102.35382993792132,
"count": 63347,
"is_parallel": true,
"self": 21.369895891964234,
"children": {
"_process_rank_one_or_two_observation": {
"total": 80.98393404595708,
"count": 506776,
"is_parallel": true,
"self": 80.98393404595708
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 660.993125110057,
"count": 63348,
"self": 2.9881354040155657,
"children": {
"process_trajectory": {
"total": 127.02141311804576,
"count": 63348,
"self": 126.76964696304572,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2517661550000412,
"count": 2,
"self": 0.2517661550000412
}
}
},
"_update_policy": {
"total": 530.9835765879957,
"count": 441,
"self": 295.03956352500404,
"children": {
"TorchPPOOptimizer.update": {
"total": 235.94401306299164,
"count": 22791,
"self": 235.94401306299164
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0459998520673253e-06,
"count": 1,
"self": 1.0459998520673253e-06
},
"TrainerController._save_models": {
"total": 0.08962997499975245,
"count": 1,
"self": 0.0013787949997094984,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08825118000004295,
"count": 1,
"self": 0.08825118000004295
}
}
}
}
}
}
}