{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4084797203540802,
"min": 0.4013776183128357,
"max": 1.4468796253204346,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12306.6767578125,
"min": 12086.283203125,
"max": 43892.5390625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989936.0,
"min": 29933.0,
"max": 989936.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989936.0,
"min": 29933.0,
"max": 989936.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5081204771995544,
"min": -0.09900214523077011,
"max": 0.5792565941810608,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 138.71688842773438,
"min": -23.760515213012695,
"max": 157.55780029296875,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.011207140050828457,
"min": 0.00031018207664601505,
"max": 0.26933401823043823,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 3.059549331665039,
"min": 0.08436952531337738,
"max": 64.64016723632812,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06612420288729481,
"min": 0.06480215453704675,
"max": 0.0750653944348619,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9257388404221274,
"min": 0.5254577610440333,
"max": 1.1061366223342095,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014967264050764718,
"min": 0.00042045791762631917,
"max": 0.016697437729302846,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.20954169671070605,
"min": 0.0042045791762631915,
"max": 0.23376412821023987,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.648576021935717e-06,
"min": 7.648576021935717e-06,
"max": 0.0002952337301601857,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010708006430710002,
"min": 0.00010708006430710002,
"max": 0.0032559677146774997,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10254949285714285,
"min": 0.10254949285714285,
"max": 0.19841124285714287,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4356928999999998,
"min": 1.3888787,
"max": 2.5722991000000004,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002646943364285715,
"min": 0.0002646943364285715,
"max": 0.009841283161428571,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003705720710000001,
"min": 0.003705720710000001,
"max": 0.10855371775,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.010171957314014435,
"min": 0.009861755184829235,
"max": 0.4606410562992096,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1424074023962021,
"min": 0.13806457817554474,
"max": 3.2244873046875,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 364.01234567901236,
"min": 345.6024096385542,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29485.0,
"min": 16732.0,
"max": 33516.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5371826994750235,
"min": -0.9999742455059483,
"max": 1.6525595099443482,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 124.5117986574769,
"min": -31.998401656746864,
"max": 138.81499883532524,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5371826994750235,
"min": -0.9999742455059483,
"max": 1.6525595099443482,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 124.5117986574769,
"min": -31.998401656746864,
"max": 138.81499883532524,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.038311669484778293,
"min": 0.037454483196294554,
"max": 8.736794147421332,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.103245228267042,
"min": 3.0550482180551626,
"max": 148.52550050616264,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1708878852",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1708880912"
},
"total": 2059.795728152,
"count": 1,
"self": 0.5386451989998022,
"children": {
"run_training.setup": {
"total": 0.04644038699962039,
"count": 1,
"self": 0.04644038699962039
},
"TrainerController.start_learning": {
"total": 2059.2106425660004,
"count": 1,
"self": 1.1940370690308555,
"children": {
"TrainerController._reset_env": {
"total": 2.41643457400005,
"count": 1,
"self": 2.41643457400005
},
"TrainerController.advance": {
"total": 2055.51728907497,
"count": 63719,
"self": 1.2848705309156685,
"children": {
"env_step": {
"total": 1452.828058919999,
"count": 63719,
"self": 1331.2349123449694,
"children": {
"SubprocessEnvManager._take_step": {
"total": 120.84432641208787,
"count": 63719,
"self": 4.514179355068791,
"children": {
"TorchPolicy.evaluate": {
"total": 116.33014705701908,
"count": 62562,
"self": 116.33014705701908
}
}
},
"workers": {
"total": 0.7488201629416835,
"count": 63719,
"self": 0.0,
"children": {
"worker_root": {
"total": 2054.607269856051,
"count": 63719,
"is_parallel": true,
"self": 830.1912026740965,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0021215060000940866,
"count": 1,
"is_parallel": true,
"self": 0.0006587680004486174,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014627379996454692,
"count": 8,
"is_parallel": true,
"self": 0.0014627379996454692
}
}
},
"UnityEnvironment.step": {
"total": 0.04814428500003487,
"count": 1,
"is_parallel": true,
"self": 0.0005750300006184261,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005009449996578041,
"count": 1,
"is_parallel": true,
"self": 0.0005009449996578041
},
"communicator.exchange": {
"total": 0.04542311499972129,
"count": 1,
"is_parallel": true,
"self": 0.04542311499972129
},
"steps_from_proto": {
"total": 0.0016451950000373472,
"count": 1,
"is_parallel": true,
"self": 0.0003696210010275536,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012755739990097936,
"count": 8,
"is_parallel": true,
"self": 0.0012755739990097936
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1224.4160671819545,
"count": 63718,
"is_parallel": true,
"self": 33.64134723086909,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.47814460501786,
"count": 63718,
"is_parallel": true,
"self": 23.47814460501786
},
"communicator.exchange": {
"total": 1072.6278001810106,
"count": 63718,
"is_parallel": true,
"self": 1072.6278001810106
},
"steps_from_proto": {
"total": 94.66877516505701,
"count": 63718,
"is_parallel": true,
"self": 18.26925913601508,
"children": {
"_process_rank_one_or_two_observation": {
"total": 76.39951602904193,
"count": 509744,
"is_parallel": true,
"self": 76.39951602904193
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 601.4043596240554,
"count": 63719,
"self": 2.3839150220483134,
"children": {
"process_trajectory": {
"total": 119.10497887800284,
"count": 63719,
"self": 118.91675165600327,
"children": {
"RLTrainer._checkpoint": {
"total": 0.18822722199956843,
"count": 2,
"self": 0.18822722199956843
}
}
},
"_update_policy": {
"total": 479.9154657240042,
"count": 448,
"self": 284.21174350003,
"children": {
"TorchPPOOptimizer.update": {
"total": 195.70372222397418,
"count": 22746,
"self": 195.70372222397418
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0129997463081963e-06,
"count": 1,
"self": 1.0129997463081963e-06
},
"TrainerController._save_models": {
"total": 0.08288083499974164,
"count": 1,
"self": 0.0014182699997036252,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08146256500003801,
"count": 1,
"self": 0.08146256500003801
}
}
}
}
}
}
}