{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.36147454380989075,
"min": 0.36147454380989075,
"max": 1.4426158666610718,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10797.9677734375,
"min": 10797.9677734375,
"max": 43763.1953125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989916.0,
"min": 29952.0,
"max": 989916.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989916.0,
"min": 29952.0,
"max": 989916.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5463314056396484,
"min": -0.1288667470216751,
"max": 0.5953317284584045,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 151.33380126953125,
"min": -31.056884765625,
"max": 167.28822326660156,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.013992258347570896,
"min": -0.022540627047419548,
"max": 0.4190542697906494,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 3.8758554458618164,
"min": -5.928184986114502,
"max": 99.31586456298828,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07053164289321838,
"min": 0.06631826804507346,
"max": 0.07365102852210316,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9874430005050574,
"min": 0.47997839247136137,
"max": 1.085693704718199,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015366178807328523,
"min": 0.00016696135665568584,
"max": 0.015366178807328523,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.21512650330259933,
"min": 0.002170497636523916,
"max": 0.21512650330259933,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.603254608471431e-06,
"min": 7.603254608471431e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010644556451860003,
"min": 0.00010644556451860003,
"max": 0.003632786289071299,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10253438571428575,
"min": 0.10253438571428575,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4354814000000005,
"min": 1.3886848,
"max": 2.6109287000000005,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002631851328571429,
"min": 0.0002631851328571429,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003684591860000001,
"min": 0.003684591860000001,
"max": 0.12111177713,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009903405793011189,
"min": 0.009903405793011189,
"max": 0.5683497190475464,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1386476755142212,
"min": 0.1386476755142212,
"max": 3.9784481525421143,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 335.83516483516485,
"min": 305.0561797752809,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30561.0,
"min": 15984.0,
"max": 32949.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6140673788993254,
"min": -1.0000000521540642,
"max": 1.7022045302120121,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 148.49419885873795,
"min": -28.980001628398895,
"max": 149.79399865865707,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6140673788993254,
"min": -1.0000000521540642,
"max": 1.7022045302120121,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 148.49419885873795,
"min": -28.980001628398895,
"max": 149.79399865865707,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03470028074756872,
"min": 0.03162997418919324,
"max": 12.023274028673768,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.1924258287763223,
"min": 2.7834377286490053,
"max": 192.3723844587803,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1681810974",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1681813188"
},
"total": 2213.7701709559997,
"count": 1,
"self": 0.4248948009990272,
"children": {
"run_training.setup": {
"total": 0.1030936469999233,
"count": 1,
"self": 0.1030936469999233
},
"TrainerController.start_learning": {
"total": 2213.2421825080005,
"count": 1,
"self": 1.4172487690270827,
"children": {
"TrainerController._reset_env": {
"total": 3.648315263999848,
"count": 1,
"self": 3.648315263999848
},
"TrainerController.advance": {
"total": 2208.0882546369726,
"count": 63918,
"self": 1.531184398907044,
"children": {
"env_step": {
"total": 1595.2468049459626,
"count": 63918,
"self": 1485.5456798400828,
"children": {
"SubprocessEnvManager._take_step": {
"total": 108.8565487019273,
"count": 63918,
"self": 4.9230100419795235,
"children": {
"TorchPolicy.evaluate": {
"total": 103.93353865994777,
"count": 62550,
"self": 103.93353865994777
}
}
},
"workers": {
"total": 0.8445764039524875,
"count": 63918,
"self": 0.0,
"children": {
"worker_root": {
"total": 2208.035303193981,
"count": 63918,
"is_parallel": true,
"self": 837.3531413149685,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017493459999968763,
"count": 1,
"is_parallel": true,
"self": 0.000580610000042725,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011687359999541513,
"count": 8,
"is_parallel": true,
"self": 0.0011687359999541513
}
}
},
"UnityEnvironment.step": {
"total": 0.048567487000127585,
"count": 1,
"is_parallel": true,
"self": 0.000555790999896999,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004800290000730456,
"count": 1,
"is_parallel": true,
"self": 0.0004800290000730456
},
"communicator.exchange": {
"total": 0.04584404199999881,
"count": 1,
"is_parallel": true,
"self": 0.04584404199999881
},
"steps_from_proto": {
"total": 0.0016876250001587323,
"count": 1,
"is_parallel": true,
"self": 0.0003722340006788727,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013153909994798596,
"count": 8,
"is_parallel": true,
"self": 0.0013153909994798596
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1370.6821618790123,
"count": 63917,
"is_parallel": true,
"self": 32.67942228108495,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 24.23359790693644,
"count": 63917,
"is_parallel": true,
"self": 24.23359790693644
},
"communicator.exchange": {
"total": 1215.238830165986,
"count": 63917,
"is_parallel": true,
"self": 1215.238830165986
},
"steps_from_proto": {
"total": 98.53031152500489,
"count": 63917,
"is_parallel": true,
"self": 21.50488541295067,
"children": {
"_process_rank_one_or_two_observation": {
"total": 77.02542611205422,
"count": 511336,
"is_parallel": true,
"self": 77.02542611205422
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 611.3102652921027,
"count": 63918,
"self": 2.644608702061305,
"children": {
"process_trajectory": {
"total": 107.50900305004461,
"count": 63918,
"self": 107.2543145180448,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2546885319998182,
"count": 2,
"self": 0.2546885319998182
}
}
},
"_update_policy": {
"total": 501.1566535399968,
"count": 455,
"self": 321.33220241200956,
"children": {
"TorchPPOOptimizer.update": {
"total": 179.82445112798723,
"count": 22770,
"self": 179.82445112798723
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2910004443256184e-06,
"count": 1,
"self": 1.2910004443256184e-06
},
"TrainerController._save_models": {
"total": 0.08836254700054269,
"count": 1,
"self": 0.001336951000666886,
"children": {
"RLTrainer._checkpoint": {
"total": 0.0870255959998758,
"count": 1,
"self": 0.0870255959998758
}
}
}
}
}
}
}