{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4654870927333832,
"min": 0.4654870927333832,
"max": 1.4581407308578491,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 14135.912109375,
"min": 14124.5634765625,
"max": 44234.15625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989992.0,
"min": 29952.0,
"max": 989992.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989992.0,
"min": 29952.0,
"max": 989992.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.44167011976242065,
"min": -0.19474275410175323,
"max": 0.44167011976242065,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 120.13426971435547,
"min": -46.15403366088867,
"max": 120.13426971435547,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.008820760063827038,
"min": -0.008820760063827038,
"max": 0.5793318152427673,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -2.3992466926574707,
"min": -2.3992466926574707,
"max": 137.3016357421875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07058378458087745,
"min": 0.06584759708412878,
"max": 0.07411003576707148,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0587567687131618,
"min": 0.4762111773838867,
"max": 1.0587567687131618,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.017376152395283697,
"min": 0.00034545916569860436,
"max": 0.017376152395283697,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2606422859292555,
"min": 0.0034545916569860436,
"max": 0.2606422859292555,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.51675749444667e-06,
"min": 7.51675749444667e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011275136241670005,
"min": 0.00011275136241670005,
"max": 0.0031388825537058997,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10250555333333335,
"min": 0.10250555333333335,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5375833,
"min": 1.3886848,
"max": 2.4020898,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026030477800000014,
"min": 0.00026030477800000014,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0039045716700000024,
"min": 0.0039045716700000024,
"max": 0.10465478058999998,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.010420089587569237,
"min": 0.010420089587569237,
"max": 0.4487457275390625,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.156301349401474,
"min": 0.14787952601909637,
"max": 3.1412200927734375,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 404.012987012987,
"min": 404.012987012987,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31109.0,
"min": 15984.0,
"max": 32255.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5180493347443544,
"min": -1.0000000521540642,
"max": 1.5180493347443544,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 116.88979877531528,
"min": -31.998401656746864,
"max": 116.88979877531528,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5180493347443544,
"min": -1.0000000521540642,
"max": 1.5180493347443544,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 116.88979877531528,
"min": -31.998401656746864,
"max": 116.88979877531528,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.043499317464530965,
"min": 0.043499317464530965,
"max": 9.53220378421247,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.3494474447688845,
"min": 3.0434805232071085,
"max": 152.51526054739952,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1754173362",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.7.1+cu126",
"numpy_version": "1.23.5",
"end_time_seconds": "1754175439"
},
"total": 2076.432912056,
"count": 1,
"self": 0.4792193060002319,
"children": {
"run_training.setup": {
"total": 0.019335599999976694,
"count": 1,
"self": 0.019335599999976694
},
"TrainerController.start_learning": {
"total": 2075.93435715,
"count": 1,
"self": 1.2358541890325796,
"children": {
"TrainerController._reset_env": {
"total": 2.1664041919998454,
"count": 1,
"self": 2.1664041919998454
},
"TrainerController.advance": {
"total": 2072.455682732967,
"count": 63592,
"self": 1.2876759839177794,
"children": {
"env_step": {
"total": 1429.5913795540127,
"count": 63592,
"self": 1286.5433565610829,
"children": {
"SubprocessEnvManager._take_step": {
"total": 142.29521544195495,
"count": 63592,
"self": 4.338840300029915,
"children": {
"TorchPolicy.evaluate": {
"total": 137.95637514192504,
"count": 62566,
"self": 137.95637514192504
}
}
},
"workers": {
"total": 0.7528075509749215,
"count": 63592,
"self": 0.0,
"children": {
"worker_root": {
"total": 2070.992183068024,
"count": 63592,
"is_parallel": true,
"self": 893.9965204869445,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001807401999940339,
"count": 1,
"is_parallel": true,
"self": 0.000592136000705068,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001215265999235271,
"count": 8,
"is_parallel": true,
"self": 0.001215265999235271
}
}
},
"UnityEnvironment.step": {
"total": 0.048128037999958906,
"count": 1,
"is_parallel": true,
"self": 0.0004953140000907297,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004383880000204954,
"count": 1,
"is_parallel": true,
"self": 0.0004383880000204954
},
"communicator.exchange": {
"total": 0.045602971999869624,
"count": 1,
"is_parallel": true,
"self": 0.045602971999869624
},
"steps_from_proto": {
"total": 0.0015913639999780571,
"count": 1,
"is_parallel": true,
"self": 0.00039308000032178825,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011982839996562689,
"count": 8,
"is_parallel": true,
"self": 0.0011982839996562689
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1176.9956625810796,
"count": 63591,
"is_parallel": true,
"self": 31.433264952083846,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.105595928984712,
"count": 63591,
"is_parallel": true,
"self": 23.105595928984712
},
"communicator.exchange": {
"total": 1028.5575264180552,
"count": 63591,
"is_parallel": true,
"self": 1028.5575264180552
},
"steps_from_proto": {
"total": 93.89927528195585,
"count": 63591,
"is_parallel": true,
"self": 18.677189549945524,
"children": {
"_process_rank_one_or_two_observation": {
"total": 75.22208573201033,
"count": 508728,
"is_parallel": true,
"self": 75.22208573201033
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 641.5766271950365,
"count": 63592,
"self": 2.336274466064424,
"children": {
"process_trajectory": {
"total": 123.58145582197221,
"count": 63592,
"self": 123.39350980897234,
"children": {
"RLTrainer._checkpoint": {
"total": 0.18794601299987335,
"count": 2,
"self": 0.18794601299987335
}
}
},
"_update_policy": {
"total": 515.6588969069999,
"count": 441,
"self": 287.99666764094854,
"children": {
"TorchPPOOptimizer.update": {
"total": 227.66222926605133,
"count": 22833,
"self": 227.66222926605133
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1170004654559307e-06,
"count": 1,
"self": 1.1170004654559307e-06
},
"TrainerController._save_models": {
"total": 0.07641491899994435,
"count": 1,
"self": 0.0013094429996272083,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07510547600031714,
"count": 1,
"self": 0.07510547600031714
}
}
}
}
}
}
}