{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.41994068026542664,
"min": 0.41994068026542664,
"max": 1.551714539527893,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12497.4345703125,
"min": 12497.4345703125,
"max": 47072.8125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989884.0,
"min": 29952.0,
"max": 989884.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989884.0,
"min": 29952.0,
"max": 989884.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.902003824710846,
"min": -0.13583017885684967,
"max": 0.902003824710846,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 255.26708984375,
"min": -32.73507308959961,
"max": 255.26708984375,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.017675092443823814,
"min": 0.01516564842313528,
"max": 0.21135880053043365,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 5.00205135345459,
"min": 4.049228191375732,
"max": 50.937469482421875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06823321763243127,
"min": 0.06724950507674034,
"max": 0.07581049061181679,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.4776325234270189,
"min": 0.281222996790376,
"max": 0.5898784818564309,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015233842444805693,
"min": 0.0001892465855652302,
"max": 0.015233842444805693,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.10663689711363986,
"min": 0.0011354795133913813,
"max": 0.10663689711363986,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 1.5817125935257134e-05,
"min": 1.5817125935257134e-05,
"max": 0.0005908992015167998,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011071988154679995,
"min": 0.00011071988154679995,
"max": 0.0036382206936299,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10263617142857144,
"min": 0.10263617142857144,
"max": 0.19848319999999997,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 0.7184532000000001,
"min": 0.7184532000000001,
"max": 1.3958485,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00027335352571428563,
"min": 0.00027335352571428563,
"max": 0.009848471680000002,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0019134746799999995,
"min": 0.0019134746799999995,
"max": 0.06064637299,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.006166991777718067,
"min": 0.006166991777718067,
"max": 0.2599059045314789,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.043168943375349045,
"min": 0.043168943375349045,
"max": 1.0396236181259155,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 330.9,
"min": 320.72727272727275,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 33090.0,
"min": 15984.0,
"max": 33090.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5890819773077964,
"min": -1.0000000521540642,
"max": 1.6110658950426362,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 158.90819773077965,
"min": -30.538001716136932,
"max": 158.90819773077965,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5890819773077964,
"min": -1.0000000521540642,
"max": 1.6110658950426362,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 158.90819773077965,
"min": -30.538001716136932,
"max": 158.90819773077965,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.021870249616877117,
"min": 0.021870249616877117,
"max": 7.622059701476246,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.1870249616877118,
"min": 1.9785366261785384,
"max": 121.95295522361994,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1710753241",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=PyramidsTraining --no-graphics --force",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1710755840"
},
"total": 2599.790075875,
"count": 1,
"self": 0.8725405100012722,
"children": {
"run_training.setup": {
"total": 0.05263306199958606,
"count": 1,
"self": 0.05263306199958606
},
"TrainerController.start_learning": {
"total": 2598.8649023029993,
"count": 1,
"self": 1.5426000491042942,
"children": {
"TrainerController._reset_env": {
"total": 2.123081154000829,
"count": 1,
"self": 2.123081154000829
},
"TrainerController.advance": {
"total": 2595.0717567448937,
"count": 63930,
"self": 1.5202277659027459,
"children": {
"env_step": {
"total": 1604.6939653011868,
"count": 63930,
"self": 1464.4834829431948,
"children": {
"SubprocessEnvManager._take_step": {
"total": 139.27501445393136,
"count": 63930,
"self": 4.9969301359051315,
"children": {
"TorchPolicy.evaluate": {
"total": 134.27808431802623,
"count": 62549,
"self": 134.27808431802623
}
}
},
"workers": {
"total": 0.9354679040607152,
"count": 63930,
"self": 0.0,
"children": {
"worker_root": {
"total": 2593.2434086469657,
"count": 63930,
"is_parallel": true,
"self": 1258.1205575938802,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0022402710001188098,
"count": 1,
"is_parallel": true,
"self": 0.0006676819994027028,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001572589000716107,
"count": 8,
"is_parallel": true,
"self": 0.001572589000716107
}
}
},
"UnityEnvironment.step": {
"total": 0.059136515999853145,
"count": 1,
"is_parallel": true,
"self": 0.0007317520012293244,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005336119993444299,
"count": 1,
"is_parallel": true,
"self": 0.0005336119993444299
},
"communicator.exchange": {
"total": 0.05592021200027375,
"count": 1,
"is_parallel": true,
"self": 0.05592021200027375
},
"steps_from_proto": {
"total": 0.0019509399990056409,
"count": 1,
"is_parallel": true,
"self": 0.0004174999958195258,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001533440003186115,
"count": 8,
"is_parallel": true,
"self": 0.001533440003186115
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1335.1228510530855,
"count": 63929,
"is_parallel": true,
"self": 36.146047124886536,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.554368457811506,
"count": 63929,
"is_parallel": true,
"self": 25.554368457811506
},
"communicator.exchange": {
"total": 1165.8202195499944,
"count": 63929,
"is_parallel": true,
"self": 1165.8202195499944
},
"steps_from_proto": {
"total": 107.60221592039306,
"count": 63929,
"is_parallel": true,
"self": 22.09969272111448,
"children": {
"_process_rank_one_or_two_observation": {
"total": 85.50252319927858,
"count": 511432,
"is_parallel": true,
"self": 85.50252319927858
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 988.8575636778041,
"count": 63930,
"self": 2.8985726268674625,
"children": {
"process_trajectory": {
"total": 133.4653514649217,
"count": 63930,
"self": 133.2155993949218,
"children": {
"RLTrainer._checkpoint": {
"total": 0.24975206999988586,
"count": 2,
"self": 0.24975206999988586
}
}
},
"_update_policy": {
"total": 852.493639586015,
"count": 233,
"self": 498.28564872619427,
"children": {
"TorchPPOOptimizer.update": {
"total": 354.2079908598207,
"count": 38495,
"self": 354.2079908598207
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2920008884975687e-06,
"count": 1,
"self": 1.2920008884975687e-06
},
"TrainerController._save_models": {
"total": 0.12746306299959542,
"count": 1,
"self": 0.0026267180001013912,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12483634499949403,
"count": 1,
"self": 0.12483634499949403
}
}
}
}
}
}
}