{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.46428897976875305,
"min": 0.4630119502544403,
"max": 1.4515272378921509,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 13936.09765625,
"min": 13882.9501953125,
"max": 44033.53125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989963.0,
"min": 29952.0,
"max": 989963.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989963.0,
"min": 29952.0,
"max": 989963.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5827290415763855,
"min": -0.09187527745962143,
"max": 0.6068825125694275,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 160.83322143554688,
"min": -22.325693130493164,
"max": 172.35462951660156,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.02060249075293541,
"min": -0.03604922816157341,
"max": 0.5022661685943604,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 5.6862874031066895,
"min": -9.192553520202637,
"max": 119.03707885742188,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06793002985137893,
"min": 0.06579567675602833,
"max": 0.07442061584897332,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.951020417919305,
"min": 0.5209443109428132,
"max": 1.0631588473479496,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01580115235299759,
"min": 0.0007982772323396237,
"max": 0.015984586886285495,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22121613294196624,
"min": 0.008781049555735861,
"max": 0.2397688032942824,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.394754677971427e-06,
"min": 7.394754677971427e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010352656549159998,
"min": 0.00010352656549159998,
"max": 0.0033809651730117,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10246488571428573,
"min": 0.10246488571428573,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4345084000000003,
"min": 1.3886848,
"max": 2.5269883,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002562420828571428,
"min": 0.0002562420828571428,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035873891599999997,
"min": 0.0035873891599999997,
"max": 0.11272613116999998,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.010695296339690685,
"min": 0.010462283156812191,
"max": 0.5908074975013733,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.14973415434360504,
"min": 0.14647196233272552,
"max": 4.135652542114258,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 337.69662921348316,
"min": 308.0108695652174,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30055.0,
"min": 15984.0,
"max": 33437.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6173370539137486,
"min": -1.0000000521540642,
"max": 1.6783521554392318,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 143.94299779832363,
"min": -28.513401605188847,
"max": 160.71039752662182,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6173370539137486,
"min": -1.0000000521540642,
"max": 1.6783521554392318,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 143.94299779832363,
"min": -28.513401605188847,
"max": 160.71039752662182,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03749218352254757,
"min": 0.033715860175779315,
"max": 12.377977542579174,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.336804333506734,
"min": 3.101859136171697,
"max": 198.04764068126678,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1754705502",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --force",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cu128",
"numpy_version": "1.23.5",
"end_time_seconds": "1754709232"
},
"total": 3729.738556303999,
"count": 1,
"self": 0.6445959319989925,
"children": {
"run_training.setup": {
"total": 0.038446603999545914,
"count": 1,
"self": 0.038446603999545914
},
"TrainerController.start_learning": {
"total": 3729.0555137680003,
"count": 1,
"self": 2.28963634012689,
"children": {
"TrainerController._reset_env": {
"total": 3.887441883000065,
"count": 1,
"self": 3.887441883000065
},
"TrainerController.advance": {
"total": 3722.7952840078706,
"count": 63884,
"self": 2.3439564358723146,
"children": {
"env_step": {
"total": 2588.390239493043,
"count": 63884,
"self": 2424.6425564839083,
"children": {
"SubprocessEnvManager._take_step": {
"total": 162.19891957393338,
"count": 63884,
"self": 6.82594165316732,
"children": {
"TorchPolicy.evaluate": {
"total": 155.37297792076606,
"count": 62557,
"self": 155.37297792076606
}
}
},
"workers": {
"total": 1.548763435201181,
"count": 63884,
"self": 0.0,
"children": {
"worker_root": {
"total": 3720.439933919284,
"count": 63884,
"is_parallel": true,
"self": 1483.6614925151225,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.008257895000497228,
"count": 1,
"is_parallel": true,
"self": 0.0008630500033177668,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.007394844997179462,
"count": 8,
"is_parallel": true,
"self": 0.007394844997179462
}
}
},
"UnityEnvironment.step": {
"total": 0.1092762340012996,
"count": 1,
"is_parallel": true,
"self": 0.0007252190007420722,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005405150004662573,
"count": 1,
"is_parallel": true,
"self": 0.0005405150004662573
},
"communicator.exchange": {
"total": 0.10602921700046863,
"count": 1,
"is_parallel": true,
"self": 0.10602921700046863
},
"steps_from_proto": {
"total": 0.0019812829996226355,
"count": 1,
"is_parallel": true,
"self": 0.0004126959993300261,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015685870002926094,
"count": 8,
"is_parallel": true,
"self": 0.0015685870002926094
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 2236.7784414041616,
"count": 63883,
"is_parallel": true,
"self": 51.414079004200175,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 33.57934539992493,
"count": 63883,
"is_parallel": true,
"self": 33.57934539992493
},
"communicator.exchange": {
"total": 2006.5745213833197,
"count": 63883,
"is_parallel": true,
"self": 2006.5745213833197
},
"steps_from_proto": {
"total": 145.2104956167168,
"count": 63883,
"is_parallel": true,
"self": 30.43937519780593,
"children": {
"_process_rank_one_or_two_observation": {
"total": 114.77112041891087,
"count": 511064,
"is_parallel": true,
"self": 114.77112041891087
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1132.0610880789554,
"count": 63884,
"self": 4.10727623512139,
"children": {
"process_trajectory": {
"total": 168.6640824258193,
"count": 63884,
"self": 168.44927080281923,
"children": {
"RLTrainer._checkpoint": {
"total": 0.21481162300005963,
"count": 2,
"self": 0.21481162300005963
}
}
},
"_update_policy": {
"total": 959.2897294180148,
"count": 450,
"self": 392.01234423113965,
"children": {
"TorchPPOOptimizer.update": {
"total": 567.2773851868751,
"count": 22809,
"self": 567.2773851868751
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.4160013961372897e-06,
"count": 1,
"self": 1.4160013961372897e-06
},
"TrainerController._save_models": {
"total": 0.0831501210013812,
"count": 1,
"self": 0.0018303590004506987,
"children": {
"RLTrainer._checkpoint": {
"total": 0.0813197620009305,
"count": 1,
"self": 0.0813197620009305
}
}
}
}
}
}
}