{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.3781135678291321,
"min": 0.3781135678291321,
"max": 1.4307013750076294,
"count": 41
},
"Pyramids.Policy.Entropy.sum": {
"value": 11409.955078125,
"min": 11409.955078125,
"max": 43401.7578125,
"count": 41
},
"Pyramids.Step.mean": {
"value": 1229913.0,
"min": 29952.0,
"max": 1229913.0,
"count": 41
},
"Pyramids.Step.sum": {
"value": 1229913.0,
"min": 29952.0,
"max": 1229913.0,
"count": 41
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.6381216049194336,
"min": -0.10954690724611282,
"max": 0.6996257305145264,
"count": 41
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 179.95028686523438,
"min": -26.291257858276367,
"max": 197.99407958984375,
"count": 41
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.007804469671100378,
"min": -0.024467280134558678,
"max": 0.379982054233551,
"count": 41
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -2.2008605003356934,
"min": -6.679567337036133,
"max": 91.19569396972656,
"count": 41
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06792121414077716,
"min": 0.06460621190905971,
"max": 0.07395365576528656,
"count": 41
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9508969979708803,
"min": 0.4869719651820748,
"max": 1.0558458053856157,
"count": 41
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.013743468947840348,
"min": 0.00025126062269508457,
"max": 0.013744519731896953,
"count": 41
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.19240856526976488,
"min": 0.003015127472341015,
"max": 0.19854017142400457,
"count": 41
},
"Pyramids.Policy.LearningRate.mean": {
"value": 0.00017849141193144758,
"min": 0.00017849141193144758,
"max": 0.00029838354339596195,
"count": 41
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0024988797670402662,
"min": 0.0020691136102954665,
"max": 0.003969489576836833,
"count": 41
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.15949712380952383,
"min": 0.15949712380952383,
"max": 0.19946118095238097,
"count": 41
},
"Pyramids.Policy.Epsilon.sum": {
"value": 2.2329597333333338,
"min": 1.3897045333333333,
"max": 2.7825489,
"count": 41
},
"Pyramids.Policy.Beta.mean": {
"value": 0.005953762668571429,
"min": 0.005953762668571429,
"max": 0.009946171977142856,
"count": 41
},
"Pyramids.Policy.Beta.sum": {
"value": 0.08335267736,
"min": 0.06897148288,
"max": 0.13232400035,
"count": 41
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.006227504927664995,
"min": 0.006227504927664995,
"max": 0.5055810809135437,
"count": 41
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.08718506991863251,
"min": 0.08718506991863251,
"max": 3.5390677452087402,
"count": 41
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 294.3679245283019,
"min": 294.3679245283019,
"max": 999.0,
"count": 41
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31203.0,
"min": 15984.0,
"max": 32139.0,
"count": 41
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6678829978237737,
"min": -1.0000000521540642,
"max": 1.6784164721203834,
"count": 41
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 176.79559776932,
"min": -32.000001668930054,
"max": 176.79559776932,
"count": 41
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6678829978237737,
"min": -1.0000000521540642,
"max": 1.6784164721203834,
"count": 41
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 176.79559776932,
"min": -32.000001668930054,
"max": 176.79559776932,
"count": 41
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.019061806392751606,
"min": 0.019061806392751606,
"max": 9.910126306116581,
"count": 41
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.0205514776316704,
"min": 1.9438937461018213,
"max": 158.5620208978653,
"count": 41
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 41
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 41
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1719478457",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1719481281"
},
"total": 2824.4943215430003,
"count": 1,
"self": 0.5522601980010222,
"children": {
"run_training.setup": {
"total": 0.05328816699989147,
"count": 1,
"self": 0.05328816699989147
},
"TrainerController.start_learning": {
"total": 2823.8887731779996,
"count": 1,
"self": 1.817783254080041,
"children": {
"TrainerController._reset_env": {
"total": 2.5952947089999725,
"count": 1,
"self": 2.5952947089999725
},
"TrainerController.advance": {
"total": 2819.3444933879196,
"count": 78810,
"self": 1.7989594930245403,
"children": {
"env_step": {
"total": 2037.9183940529354,
"count": 78810,
"self": 1867.8794562079274,
"children": {
"SubprocessEnvManager._take_step": {
"total": 168.9795233120285,
"count": 78810,
"self": 5.889936549074491,
"children": {
"TorchPolicy.evaluate": {
"total": 163.089586762954,
"count": 76980,
"self": 163.089586762954
}
}
},
"workers": {
"total": 1.0594145329794173,
"count": 78809,
"self": 0.0,
"children": {
"worker_root": {
"total": 2817.0680204399755,
"count": 78809,
"is_parallel": true,
"self": 1106.474099785943,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020282610000776913,
"count": 1,
"is_parallel": true,
"self": 0.0005942040002082649,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014340569998694264,
"count": 8,
"is_parallel": true,
"self": 0.0014340569998694264
}
}
},
"UnityEnvironment.step": {
"total": 0.06506225999987691,
"count": 1,
"is_parallel": true,
"self": 0.0006208469999364752,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00046270400002867973,
"count": 1,
"is_parallel": true,
"self": 0.00046270400002867973
},
"communicator.exchange": {
"total": 0.062499590000015814,
"count": 1,
"is_parallel": true,
"self": 0.062499590000015814
},
"steps_from_proto": {
"total": 0.0014791189998959453,
"count": 1,
"is_parallel": true,
"self": 0.000305437000179154,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011736819997167913,
"count": 8,
"is_parallel": true,
"self": 0.0011736819997167913
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1710.5939206540324,
"count": 78808,
"is_parallel": true,
"self": 42.12445150104895,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 29.185929454960842,
"count": 78808,
"is_parallel": true,
"self": 29.185929454960842
},
"communicator.exchange": {
"total": 1514.2816479239737,
"count": 78808,
"is_parallel": true,
"self": 1514.2816479239737
},
"steps_from_proto": {
"total": 125.001891774049,
"count": 78808,
"is_parallel": true,
"self": 26.01018322192772,
"children": {
"_process_rank_one_or_two_observation": {
"total": 98.99170855212128,
"count": 630464,
"is_parallel": true,
"self": 98.99170855212128
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 779.6271398419594,
"count": 78809,
"self": 3.4709116989604354,
"children": {
"process_trajectory": {
"total": 158.66475321699522,
"count": 78809,
"self": 158.45334494299482,
"children": {
"RLTrainer._checkpoint": {
"total": 0.21140827400040507,
"count": 2,
"self": 0.21140827400040507
}
}
},
"_update_policy": {
"total": 617.4914749260038,
"count": 560,
"self": 364.7966669789571,
"children": {
"TorchPPOOptimizer.update": {
"total": 252.69480794704668,
"count": 28074,
"self": 252.69480794704668
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.669000084802974e-06,
"count": 1,
"self": 1.669000084802974e-06
},
"TrainerController._save_models": {
"total": 0.131200157999956,
"count": 1,
"self": 0.002073921000373957,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12912623699958203,
"count": 1,
"self": 0.12912623699958203
}
}
}
}
}
}
}