{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5365502238273621,
"min": 0.5365502238273621,
"max": 1.4933209419250488,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 16130.845703125,
"min": 16130.845703125,
"max": 45301.3828125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989962.0,
"min": 29952.0,
"max": 989962.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989962.0,
"min": 29952.0,
"max": 989962.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.06402862071990967,
"min": -0.10768884420394897,
"max": 0.09902215749025345,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 15.815069198608398,
"min": -25.95301055908203,
"max": 24.656517028808594,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.048150576651096344,
"min": -0.048150576651096344,
"max": 0.1367024928331375,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -11.893192291259766,
"min": -11.893192291259766,
"max": 32.9453010559082,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06799920988924599,
"min": 0.06453638719316358,
"max": 0.07324390831351811,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9519889384494438,
"min": 0.4939414654238198,
"max": 1.0181615443103582,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.008312903345578904,
"min": 0.0005193653563030646,
"max": 0.008312903345578904,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.11638064683810465,
"min": 0.007271114988242905,
"max": 0.11638064683810465,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.694840292228573e-06,
"min": 7.694840292228573e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010772776409120003,
"min": 0.00010772776409120003,
"max": 0.0033718342760553,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10256491428571425,
"min": 0.10256491428571425,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4359087999999995,
"min": 1.3886848,
"max": 2.4431751000000004,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026623493714285725,
"min": 0.00026623493714285725,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037272891200000016,
"min": 0.0037272891200000016,
"max": 0.11241207552999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.011129645630717278,
"min": 0.010585368610918522,
"max": 0.3125114142894745,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1558150351047516,
"min": 0.14819516241550446,
"max": 2.187579870223999,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 835.6486486486486,
"min": 760.2702702702703,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30919.0,
"min": 15984.0,
"max": 33099.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": -0.07927570814216459,
"min": -1.0000000521540642,
"max": 0.39108417849791677,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": -2.93320120126009,
"min": -31.998401671648026,
"max": 14.861198782920837,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": -0.07927570814216459,
"min": -1.0000000521540642,
"max": 0.39108417849791677,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": -2.93320120126009,
"min": -31.998401671648026,
"max": 14.861198782920837,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.09616883003075821,
"min": 0.08563765169133265,
"max": 6.396927391178906,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.558246711138054,
"min": 3.1652114451280795,
"max": 102.3508382588625,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1681761766",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1681763706"
},
"total": 1940.016888574,
"count": 1,
"self": 0.4241349049998462,
"children": {
"run_training.setup": {
"total": 0.11491569800000434,
"count": 1,
"self": 0.11491569800000434
},
"TrainerController.start_learning": {
"total": 1939.477837971,
"count": 1,
"self": 1.2435745010684514,
"children": {
"TrainerController._reset_env": {
"total": 3.767283196000335,
"count": 1,
"self": 3.767283196000335
},
"TrainerController.advance": {
"total": 1934.3728557009322,
"count": 63218,
"self": 1.3089414289538581,
"children": {
"env_step": {
"total": 1332.5932610970049,
"count": 63218,
"self": 1233.1490597651982,
"children": {
"SubprocessEnvManager._take_step": {
"total": 98.69078752993164,
"count": 63218,
"self": 4.536203546865636,
"children": {
"TorchPolicy.evaluate": {
"total": 94.154583983066,
"count": 62566,
"self": 94.154583983066
}
}
},
"workers": {
"total": 0.7534138018750127,
"count": 63218,
"self": 0.0,
"children": {
"worker_root": {
"total": 1935.13881596694,
"count": 63218,
"is_parallel": true,
"self": 804.8891726078964,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0017417100002603547,
"count": 1,
"is_parallel": true,
"self": 0.0005532680002033885,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011884420000569662,
"count": 8,
"is_parallel": true,
"self": 0.0011884420000569662
}
}
},
"UnityEnvironment.step": {
"total": 0.04603518400017492,
"count": 1,
"is_parallel": true,
"self": 0.0005344760002117255,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00047093600005609915,
"count": 1,
"is_parallel": true,
"self": 0.00047093600005609915
},
"communicator.exchange": {
"total": 0.043420332000096096,
"count": 1,
"is_parallel": true,
"self": 0.043420332000096096
},
"steps_from_proto": {
"total": 0.001609439999811002,
"count": 1,
"is_parallel": true,
"self": 0.0003687599996737845,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012406800001372176,
"count": 8,
"is_parallel": true,
"self": 0.0012406800001372176
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1130.2496433590436,
"count": 63217,
"is_parallel": true,
"self": 31.19939596012682,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.1983211360689,
"count": 63217,
"is_parallel": true,
"self": 22.1983211360689
},
"communicator.exchange": {
"total": 987.4359414758919,
"count": 63217,
"is_parallel": true,
"self": 987.4359414758919
},
"steps_from_proto": {
"total": 89.41598478695596,
"count": 63217,
"is_parallel": true,
"self": 18.555440602806357,
"children": {
"_process_rank_one_or_two_observation": {
"total": 70.8605441841496,
"count": 505736,
"is_parallel": true,
"self": 70.8605441841496
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 600.4706531749734,
"count": 63218,
"self": 2.2944297449262194,
"children": {
"process_trajectory": {
"total": 99.7101897470443,
"count": 63218,
"self": 99.50453740804369,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20565233900060775,
"count": 2,
"self": 0.20565233900060775
}
}
},
"_update_policy": {
"total": 498.46603368300293,
"count": 444,
"self": 321.74128852101694,
"children": {
"TorchPPOOptimizer.update": {
"total": 176.724745161986,
"count": 22806,
"self": 176.724745161986
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.959994713426568e-07,
"count": 1,
"self": 9.959994713426568e-07
},
"TrainerController._save_models": {
"total": 0.09412357699966378,
"count": 1,
"self": 0.0016023460002543288,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09252123099940945,
"count": 1,
"self": 0.09252123099940945
}
}
}
}
}
}
}