{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.48065105080604553,
"min": 0.4798978269100189,
"max": 1.1220625638961792,
"count": 29
},
"Pyramids.Policy.Entropy.sum": {
"value": 14388.76953125,
"min": 14212.654296875,
"max": 34110.703125,
"count": 29
},
"Pyramids.Step.mean": {
"value": 989948.0,
"min": 149995.0,
"max": 989948.0,
"count": 29
},
"Pyramids.Step.sum": {
"value": 989948.0,
"min": 149995.0,
"max": 989948.0,
"count": 29
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5064566731452942,
"min": -0.07418560981750488,
"max": 0.5412768125534058,
"count": 29
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 135.22393798828125,
"min": -17.952917098999023,
"max": 147.227294921875,
"count": 29
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.03052312135696411,
"min": -0.001319264993071556,
"max": 0.06609056890010834,
"count": 29
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 8.149673461914062,
"min": -0.33113551139831543,
"max": 13.795373916625977,
"count": 29
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06964786789224793,
"min": 0.06426054523977012,
"max": 0.07468332773012172,
"count": 29
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9750701504914709,
"min": 0.3520741780367465,
"max": 1.071499333552578,
"count": 29
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014746138068620072,
"min": 0.00017284317698471275,
"max": 0.015604803865634105,
"count": 29
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.206445932960681,
"min": 0.0008642158849235637,
"max": 0.21846725411887746,
"count": 29
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.372940399528579e-06,
"min": 7.372940399528579e-06,
"max": 0.00025742839419054,
"count": 29
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010322116559340011,
"min": 0.00010322116559340011,
"max": 0.0032525979158008005,
"count": 29
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10245761428571429,
"min": 0.10245761428571429,
"max": 0.18580946000000004,
"count": 29
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4344066,
"min": 0.9290473000000002,
"max": 2.5275625000000006,
"count": 29
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002555156671428574,
"min": 0.0002555156671428574,
"max": 0.008582365054000002,
"count": 29
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035772193400000034,
"min": 0.0035772193400000034,
"max": 0.10845150007999999,
"count": 29
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.00785643607378006,
"min": 0.00785643607378006,
"max": 0.05546265095472336,
"count": 29
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.10999010503292084,
"min": 0.10999010503292084,
"max": 0.5188475847244263,
"count": 29
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 397.49333333333334,
"min": 361.7439024390244,
"max": 999.0,
"count": 29
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29812.0,
"min": 15984.0,
"max": 32343.0,
"count": 29
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5224933087825776,
"min": -1.0000000521540642,
"max": 1.5406560884198037,
"count": 29
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 114.18699815869331,
"min": -22.684201456606388,
"max": 126.33379925042391,
"count": 29
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5224933087825776,
"min": -1.0000000521540642,
"max": 1.5406560884198037,
"count": 29
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 114.18699815869331,
"min": -22.684201456606388,
"max": 126.33379925042391,
"count": 29
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03290017237828579,
"min": 0.03027044881132386,
"max": 0.5963121356908232,
"count": 29
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.467512928371434,
"min": 2.3938862106733723,
"max": 15.233707860810682,
"count": 29
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 29
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 29
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1688712922",
"python_version": "3.10.12 (main, Jun 7 2023, 12:45:35) [GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=KLJan05-Pyramids-Training --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1688714875"
},
"total": 1953.1778916150001,
"count": 1,
"self": 0.47664636300032726,
"children": {
"run_training.setup": {
"total": 0.03846240099994702,
"count": 1,
"self": 0.03846240099994702
},
"TrainerController.start_learning": {
"total": 1952.662782851,
"count": 1,
"self": 1.4037849070359698,
"children": {
"TrainerController._reset_env": {
"total": 3.996508485999925,
"count": 1,
"self": 3.996508485999925
},
"TrainerController.advance": {
"total": 1947.168179516964,
"count": 55676,
"self": 1.3236069869774383,
"children": {
"env_step": {
"total": 1390.1448735000317,
"count": 55676,
"self": 1285.6247089229973,
"children": {
"SubprocessEnvManager._take_step": {
"total": 103.69748869599846,
"count": 55676,
"self": 4.303733272982413,
"children": {
"TorchPolicy.evaluate": {
"total": 99.39375542301605,
"count": 54443,
"self": 99.39375542301605
}
}
},
"workers": {
"total": 0.8226758810358206,
"count": 55676,
"self": 0.0,
"children": {
"worker_root": {
"total": 1947.2436838209596,
"count": 55676,
"is_parallel": true,
"self": 767.1333403839435,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001721568999982992,
"count": 1,
"is_parallel": true,
"self": 0.0005420090001280187,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011795599998549733,
"count": 8,
"is_parallel": true,
"self": 0.0011795599998549733
}
}
},
"UnityEnvironment.step": {
"total": 0.052451023999992685,
"count": 1,
"is_parallel": true,
"self": 0.0005916090000255281,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005768480000369891,
"count": 1,
"is_parallel": true,
"self": 0.0005768480000369891
},
"communicator.exchange": {
"total": 0.04933949299993401,
"count": 1,
"is_parallel": true,
"self": 0.04933949299993401
},
"steps_from_proto": {
"total": 0.0019430739999961588,
"count": 1,
"is_parallel": true,
"self": 0.00040355399983127427,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015395200001648846,
"count": 8,
"is_parallel": true,
"self": 0.0015395200001648846
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1180.1103434370161,
"count": 55675,
"is_parallel": true,
"self": 29.428543415944432,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 19.77364289504635,
"count": 55675,
"is_parallel": true,
"self": 19.77364289504635
},
"communicator.exchange": {
"total": 1041.0423267700226,
"count": 55675,
"is_parallel": true,
"self": 1041.0423267700226
},
"steps_from_proto": {
"total": 89.86583035600279,
"count": 55675,
"is_parallel": true,
"self": 17.994584492995045,
"children": {
"_process_rank_one_or_two_observation": {
"total": 71.87124586300774,
"count": 445400,
"is_parallel": true,
"self": 71.87124586300774
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 555.6996990299548,
"count": 55676,
"self": 2.6135592619712043,
"children": {
"process_trajectory": {
"total": 94.8778441619827,
"count": 55676,
"self": 94.59408217598298,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2837619859997176,
"count": 2,
"self": 0.2837619859997176
}
}
},
"_update_policy": {
"total": 458.2082956060009,
"count": 401,
"self": 289.97399806600424,
"children": {
"TorchPPOOptimizer.update": {
"total": 168.23429753999665,
"count": 19791,
"self": 168.23429753999665
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.097000222216593e-06,
"count": 1,
"self": 1.097000222216593e-06
},
"TrainerController._save_models": {
"total": 0.09430884399989736,
"count": 1,
"self": 0.0016985620000014023,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09261028199989596,
"count": 1,
"self": 0.09261028199989596
}
}
}
}
}
}
}