{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.6756361126899719,
"min": 0.6701516509056091,
"max": 1.4935383796691895,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 20333.9453125,
"min": 20115.271484375,
"max": 45307.98046875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989922.0,
"min": 29974.0,
"max": 989922.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989922.0,
"min": 29974.0,
"max": 989922.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.18971240520477295,
"min": -0.11628411710262299,
"max": 0.18971240520477295,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 48.18695068359375,
"min": -27.908187866210938,
"max": 48.18695068359375,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.017311885952949524,
"min": -0.017311885952949524,
"max": 0.3339952826499939,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -4.397219181060791,
"min": -4.397219181060791,
"max": 79.82487487792969,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06804686577088023,
"min": 0.06244572261530101,
"max": 0.07271378252218956,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9526561207923233,
"min": 0.5693039985443198,
"max": 1.0337674864276778,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.009808101011064742,
"min": 4.252900801812795e-05,
"max": 0.009808101011064742,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.1373134141549064,
"min": 0.0005528771042356633,
"max": 0.1373134141549064,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.687318866164289e-06,
"min": 7.687318866164289e-06,
"max": 0.000294771226742925,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010762246412630004,
"min": 0.00010762246412630004,
"max": 0.0033760123746625994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10256240714285715,
"min": 0.10256240714285715,
"max": 0.198257075,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4358737000000001,
"min": 1.4358737000000001,
"max": 2.4253374,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026598447357142863,
"min": 0.00026598447357142863,
"max": 0.009825881792500001,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003723782630000001,
"min": 0.003723782630000001,
"max": 0.11255120626000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012228773906826973,
"min": 0.012228773906826973,
"max": 0.3726975917816162,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.17120283842086792,
"min": 0.17120283842086792,
"max": 2.9815807342529297,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 651.8695652173913,
"min": 650.5744680851063,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29986.0,
"min": 17412.0,
"max": 33091.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.8262260561080083,
"min": -0.9999862593309633,
"max": 0.8262260561080083,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 38.00639858096838,
"min": -31.9992016851902,
"max": 38.00639858096838,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.8262260561080083,
"min": -0.9999862593309633,
"max": 0.8262260561080083,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 38.00639858096838,
"min": -31.9992016851902,
"max": 38.00639858096838,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.08246177341156315,
"min": 0.08246177341156315,
"max": 7.827282040431681,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.793241576931905,
"min": 3.793241576931905,
"max": 140.89107672777027,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1723097018",
"python_version": "3.10.12 (main, Jul 29 2024, 16:56:48) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1723099058"
},
"total": 2039.6740329119998,
"count": 1,
"self": 0.4759332749997611,
"children": {
"run_training.setup": {
"total": 0.05165100399995026,
"count": 1,
"self": 0.05165100399995026
},
"TrainerController.start_learning": {
"total": 2039.146448633,
"count": 1,
"self": 1.417873042025576,
"children": {
"TrainerController._reset_env": {
"total": 2.0833010549999926,
"count": 1,
"self": 2.0833010549999926
},
"TrainerController.advance": {
"total": 2035.5592469469746,
"count": 63118,
"self": 1.516181049967372,
"children": {
"env_step": {
"total": 1401.0235454000162,
"count": 63118,
"self": 1264.1000981470065,
"children": {
"SubprocessEnvManager._take_step": {
"total": 136.0496659569909,
"count": 63118,
"self": 4.664039991971663,
"children": {
"TorchPolicy.evaluate": {
"total": 131.38562596501924,
"count": 62571,
"self": 131.38562596501924
}
}
},
"workers": {
"total": 0.8737812960187057,
"count": 63118,
"self": 0.0,
"children": {
"worker_root": {
"total": 2034.123707210017,
"count": 63118,
"is_parallel": true,
"self": 895.2181351230279,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020446720000109053,
"count": 1,
"is_parallel": true,
"self": 0.0006199140002536296,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014247579997572757,
"count": 8,
"is_parallel": true,
"self": 0.0014247579997572757
}
}
},
"UnityEnvironment.step": {
"total": 0.04945018400007939,
"count": 1,
"is_parallel": true,
"self": 0.0006115370000543408,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004287250000061249,
"count": 1,
"is_parallel": true,
"self": 0.0004287250000061249
},
"communicator.exchange": {
"total": 0.04678344299998116,
"count": 1,
"is_parallel": true,
"self": 0.04678344299998116
},
"steps_from_proto": {
"total": 0.0016264790000377616,
"count": 1,
"is_parallel": true,
"self": 0.0003340649998335721,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012924140002041895,
"count": 8,
"is_parallel": true,
"self": 0.0012924140002041895
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1138.9055720869892,
"count": 63117,
"is_parallel": true,
"self": 33.51948442598314,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.53362026202126,
"count": 63117,
"is_parallel": true,
"self": 23.53362026202126
},
"communicator.exchange": {
"total": 981.8826445739744,
"count": 63117,
"is_parallel": true,
"self": 981.8826445739744
},
"steps_from_proto": {
"total": 99.96982282501051,
"count": 63117,
"is_parallel": true,
"self": 20.610159356888403,
"children": {
"_process_rank_one_or_two_observation": {
"total": 79.35966346812211,
"count": 504936,
"is_parallel": true,
"self": 79.35966346812211
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 633.0195204969909,
"count": 63118,
"self": 2.519143280954154,
"children": {
"process_trajectory": {
"total": 126.85651098603148,
"count": 63118,
"self": 126.6546695240313,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20184146200017494,
"count": 2,
"self": 0.20184146200017494
}
}
},
"_update_policy": {
"total": 503.6438662300053,
"count": 439,
"self": 298.25273636900215,
"children": {
"TorchPPOOptimizer.update": {
"total": 205.39112986100315,
"count": 22839,
"self": 205.39112986100315
}
}
}
}
}
}
},
"trainer_threads": {
"total": 8.829997568682302e-07,
"count": 1,
"self": 8.829997568682302e-07
},
"TrainerController._save_models": {
"total": 0.08602670600021156,
"count": 1,
"self": 0.0015569390002383443,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08446976699997322,
"count": 1,
"self": 0.08446976699997322
}
}
}
}
}
}
}