{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.18350118398666382,
"min": 0.18278071284294128,
"max": 1.3706443309783936,
"count": 50
},
"Pyramids.Policy.Entropy.sum": {
"value": 5502.099609375,
"min": 5460.025390625,
"max": 41579.8671875,
"count": 50
},
"Pyramids.Step.mean": {
"value": 1499901.0,
"min": 29952.0,
"max": 1499901.0,
"count": 50
},
"Pyramids.Step.sum": {
"value": 1499901.0,
"min": 29952.0,
"max": 1499901.0,
"count": 50
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.7083656787872314,
"min": -0.049884118139743805,
"max": 0.7608505487442017,
"count": 50
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 201.88421630859375,
"min": -12.022072792053223,
"max": 223.6900634765625,
"count": 50
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.006795857101678848,
"min": -0.004466857761144638,
"max": 0.45243701338768005,
"count": 50
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 1.936819314956665,
"min": -1.2551870346069336,
"max": 107.22756958007812,
"count": 50
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07382442496298924,
"min": 0.06461313227815221,
"max": 0.07393613607690838,
"count": 50
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0335419494818494,
"min": 0.5018676308145273,
"max": 1.082827937391509,
"count": 50
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015811939797038408,
"min": 0.00020058981358704928,
"max": 0.017626756042729886,
"count": 50
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2213671571585377,
"min": 0.001404128695109345,
"max": 0.2567393596043842,
"count": 50
},
"Pyramids.Policy.LearningRate.mean": {
"value": 3.1430846666238102e-06,
"min": 3.1430846666238102e-06,
"max": 0.00029676708679192377,
"count": 50
},
"Pyramids.Policy.LearningRate.sum": {
"value": 4.4003185332733344e-05,
"min": 4.4003185332733344e-05,
"max": 0.0038249406250198664,
"count": 50
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10104766190476193,
"min": 0.10104766190476193,
"max": 0.19892236190476195,
"count": 50
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.414667266666667,
"min": 1.3794090666666667,
"max": 2.7749801333333335,
"count": 50
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00011466142428571429,
"min": 0.00011466142428571429,
"max": 0.009892343954285714,
"count": 50
},
"Pyramids.Policy.Beta.sum": {
"value": 0.00160525994,
"min": 0.00160525994,
"max": 0.12752051532000003,
"count": 50
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.008289541117846966,
"min": 0.008289541117846966,
"max": 0.5864227414131165,
"count": 50
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.11605358123779297,
"min": 0.11605358123779297,
"max": 4.104959011077881,
"count": 50
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 285.7943925233645,
"min": 241.6829268292683,
"max": 999.0,
"count": 50
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30580.0,
"min": 15984.0,
"max": 33285.0,
"count": 50
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6394205472179662,
"min": -1.0000000521540642,
"max": 1.7444187964384372,
"count": 50
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 175.4179985523224,
"min": -32.000001668930054,
"max": 212.27179804444313,
"count": 50
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6394205472179662,
"min": -1.0000000521540642,
"max": 1.7444187964384372,
"count": 50
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 175.4179985523224,
"min": -32.000001668930054,
"max": 212.27179804444313,
"count": 50
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.025221727137352126,
"min": 0.022200211014918512,
"max": 12.058133356273174,
"count": 50
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.6987248036966776,
"min": 2.5448229197063483,
"max": 192.9301337003708,
"count": 50
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 50
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 50
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1680708260",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1680712117"
},
"total": 3857.337979299,
"count": 1,
"self": 0.4802387469990208,
"children": {
"run_training.setup": {
"total": 0.12208969899984368,
"count": 1,
"self": 0.12208969899984368
},
"TrainerController.start_learning": {
"total": 3856.735650853001,
"count": 1,
"self": 2.5176135073270416,
"children": {
"TrainerController._reset_env": {
"total": 4.187815661998684,
"count": 1,
"self": 4.187815661998684
},
"TrainerController.advance": {
"total": 3849.895607516677,
"count": 96901,
"self": 2.5142060716370906,
"children": {
"env_step": {
"total": 2840.20973961213,
"count": 96901,
"self": 2653.65257890089,
"children": {
"SubprocessEnvManager._take_step": {
"total": 185.08426722985132,
"count": 96901,
"self": 7.56914735270766,
"children": {
"TorchPolicy.evaluate": {
"total": 177.51511987714366,
"count": 93805,
"self": 177.51511987714366
}
}
},
"workers": {
"total": 1.4728934813883825,
"count": 96901,
"self": 0.0,
"children": {
"worker_root": {
"total": 3847.686080408068,
"count": 96901,
"is_parallel": true,
"self": 1380.8562785770027,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019676479987538187,
"count": 1,
"is_parallel": true,
"self": 0.0006497310005215695,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013179169982322492,
"count": 8,
"is_parallel": true,
"self": 0.0013179169982322492
}
}
},
"UnityEnvironment.step": {
"total": 0.051746670998909394,
"count": 1,
"is_parallel": true,
"self": 0.0006192049968376523,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005490420007845387,
"count": 1,
"is_parallel": true,
"self": 0.0005490420007845387
},
"communicator.exchange": {
"total": 0.048714160000599804,
"count": 1,
"is_parallel": true,
"self": 0.048714160000599804
},
"steps_from_proto": {
"total": 0.001864264000687399,
"count": 1,
"is_parallel": true,
"self": 0.0004189040046185255,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014453599960688734,
"count": 8,
"is_parallel": true,
"self": 0.0014453599960688734
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 2466.829801831065,
"count": 96900,
"is_parallel": true,
"self": 52.83836421567321,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 36.516817352701764,
"count": 96900,
"is_parallel": true,
"self": 36.516817352701764
},
"communicator.exchange": {
"total": 2224.4332258787636,
"count": 96900,
"is_parallel": true,
"self": 2224.4332258787636
},
"steps_from_proto": {
"total": 153.04139438392667,
"count": 96900,
"is_parallel": true,
"self": 33.25704529172435,
"children": {
"_process_rank_one_or_two_observation": {
"total": 119.78434909220232,
"count": 775200,
"is_parallel": true,
"self": 119.78434909220232
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1007.1716618329101,
"count": 96901,
"self": 4.574954038829674,
"children": {
"process_trajectory": {
"total": 172.5113150200832,
"count": 96901,
"self": 171.96421503308375,
"children": {
"RLTrainer._checkpoint": {
"total": 0.5470999869994557,
"count": 3,
"self": 0.5470999869994557
}
}
},
"_update_policy": {
"total": 830.0853927739972,
"count": 688,
"self": 510.09852103289813,
"children": {
"TorchPPOOptimizer.update": {
"total": 319.9868717410991,
"count": 34185,
"self": 319.9868717410991
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1209995136596262e-06,
"count": 1,
"self": 1.1209995136596262e-06
},
"TrainerController._save_models": {
"total": 0.13461304599877622,
"count": 1,
"self": 0.002820503999828361,
"children": {
"RLTrainer._checkpoint": {
"total": 0.13179254199894785,
"count": 1,
"self": 0.13179254199894785
}
}
}
}
}
}
}