{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.269464373588562,
"min": 0.2650339901447296,
"max": 1.5006886720657349,
"count": 55
},
"Pyramids.Policy.Entropy.sum": {
"value": 8165.8486328125,
"min": 7977.322265625,
"max": 45524.890625,
"count": 55
},
"Pyramids.Step.mean": {
"value": 1649949.0,
"min": 29952.0,
"max": 1649949.0,
"count": 55
},
"Pyramids.Step.sum": {
"value": 1649949.0,
"min": 29952.0,
"max": 1649949.0,
"count": 55
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.809245765209198,
"min": -0.06885088980197906,
"max": 0.809245765209198,
"count": 55
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 241.9644775390625,
"min": -16.59306526184082,
"max": 241.9644775390625,
"count": 55
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.035112764686346054,
"min": -0.01266183890402317,
"max": 0.25483280420303345,
"count": 55
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 10.498716354370117,
"min": -3.3047399520874023,
"max": 61.66953659057617,
"count": 55
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06762020323575209,
"min": 0.06461597278532628,
"max": 0.07387429472811896,
"count": 55
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9466828453005292,
"min": 0.489250494796119,
"max": 1.0445018669147519,
"count": 55
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.013338210148504004,
"min": 0.0005386727252210296,
"max": 0.016421273174013807,
"count": 55
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.18673494207905605,
"min": 0.006464072702652355,
"max": 0.23411502497037873,
"count": 55
},
"Pyramids.Policy.LearningRate.mean": {
"value": 0.0001365119544960333,
"min": 0.0001365119544960333,
"max": 0.00029838354339596195,
"count": 55
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0019111673629444664,
"min": 0.0019111673629444664,
"max": 0.003982233072589033,
"count": 55
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.14550396666666668,
"min": 0.14550396666666668,
"max": 0.19946118095238097,
"count": 55
},
"Pyramids.Policy.Epsilon.sum": {
"value": 2.0370555333333336,
"min": 1.3962282666666668,
"max": 2.8274109666666667,
"count": 55
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0045558462699999995,
"min": 0.0045558462699999995,
"max": 0.009946171977142856,
"count": 55
},
"Pyramids.Policy.Beta.sum": {
"value": 0.06378184778,
"min": 0.06378184778,
"max": 0.13275835557,
"count": 55
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012130248360335827,
"min": 0.01125478744506836,
"max": 0.3685508370399475,
"count": 55
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.16982348263263702,
"min": 0.15783996880054474,
"max": 2.5798559188842773,
"count": 55
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 233.5079365079365,
"min": 233.5079365079365,
"max": 999.0,
"count": 55
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29422.0,
"min": 15984.0,
"max": 32327.0,
"count": 55
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.7664920509098068,
"min": -1.0000000521540642,
"max": 1.7664920509098068,
"count": 55
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 222.57799841463566,
"min": -31.997201651334763,
"max": 222.57799841463566,
"count": 55
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.7664920509098068,
"min": -1.0000000521540642,
"max": 1.7664920509098068,
"count": 55
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 222.57799841463566,
"min": -31.997201651334763,
"max": 222.57799841463566,
"count": 55
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.029243690274020153,
"min": 0.029243690274020153,
"max": 6.4307475266978145,
"count": 55
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.6847049745265394,
"min": 3.3811807200836483,
"max": 102.89196042716503,
"count": 55
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 55
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 55
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1771949867",
"python_version": "3.10.10 (main, Mar 21 2023, 18:45:11) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cu128",
"numpy_version": "1.23.5",
"end_time_seconds": "1771954044"
},
"total": 4177.5134517,
"count": 1,
"self": 0.7402511000000231,
"children": {
"run_training.setup": {
"total": 0.032688539999981,
"count": 1,
"self": 0.032688539999981
},
"TrainerController.start_learning": {
"total": 4176.74051206,
"count": 1,
"self": 3.615948418953849,
"children": {
"TrainerController._reset_env": {
"total": 4.283828176000043,
"count": 1,
"self": 4.283828176000043
},
"TrainerController.advance": {
"total": 4168.672299668045,
"count": 108442,
"self": 3.563682548006909,
"children": {
"env_step": {
"total": 2877.2120407600246,
"count": 108442,
"self": 2643.972967912051,
"children": {
"SubprocessEnvManager._take_step": {
"total": 230.96271487799822,
"count": 108442,
"self": 8.335680869993212,
"children": {
"TorchPolicy.evaluate": {
"total": 222.627034008005,
"count": 105006,
"self": 222.627034008005
}
}
},
"workers": {
"total": 2.2763579699752654,
"count": 108441,
"self": 0.0,
"children": {
"worker_root": {
"total": 4166.260338396049,
"count": 108441,
"is_parallel": true,
"self": 1774.4157422859535,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.00897214899998744,
"count": 1,
"is_parallel": true,
"self": 0.006127012000206378,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0028451369997810616,
"count": 8,
"is_parallel": true,
"self": 0.0028451369997810616
}
}
},
"UnityEnvironment.step": {
"total": 0.06351107000000411,
"count": 1,
"is_parallel": true,
"self": 0.000558561000161717,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004927709999265062,
"count": 1,
"is_parallel": true,
"self": 0.0004927709999265062
},
"communicator.exchange": {
"total": 0.060867268999913904,
"count": 1,
"is_parallel": true,
"self": 0.060867268999913904
},
"steps_from_proto": {
"total": 0.0015924690000019837,
"count": 1,
"is_parallel": true,
"self": 0.00037140899985388387,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012210600001480998,
"count": 8,
"is_parallel": true,
"self": 0.0012210600001480998
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 2391.844596110095,
"count": 108440,
"is_parallel": true,
"self": 62.887925377081956,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 44.14582356704125,
"count": 108440,
"is_parallel": true,
"self": 44.14582356704125
},
"communicator.exchange": {
"total": 2111.871402729024,
"count": 108440,
"is_parallel": true,
"self": 2111.871402729024
},
"steps_from_proto": {
"total": 172.93944443694795,
"count": 108440,
"is_parallel": true,
"self": 38.400617932108275,
"children": {
"_process_rank_one_or_two_observation": {
"total": 134.53882650483968,
"count": 867520,
"is_parallel": true,
"self": 134.53882650483968
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1287.8965763600131,
"count": 108441,
"self": 7.161603354101317,
"children": {
"process_trajectory": {
"total": 217.49613446292005,
"count": 108441,
"self": 217.08926000191923,
"children": {
"RLTrainer._checkpoint": {
"total": 0.40687446100082525,
"count": 3,
"self": 0.40687446100082525
}
}
},
"_update_policy": {
"total": 1063.2388385429917,
"count": 773,
"self": 413.9084696850158,
"children": {
"TorchPPOOptimizer.update": {
"total": 649.3303688579759,
"count": 38265,
"self": 649.3303688579759
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.920000613608863e-06,
"count": 1,
"self": 1.920000613608863e-06
},
"TrainerController._save_models": {
"total": 0.1684338770000977,
"count": 1,
"self": 0.01432237900007749,
"children": {
"RLTrainer._checkpoint": {
"total": 0.15411149800002022,
"count": 1,
"self": 0.15411149800002022
}
}
}
}
}
}
}