{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.23297137022018433,
"min": 0.21157538890838623,
"max": 1.3655118942260742,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 6977.95849609375,
"min": 6357.41748046875,
"max": 41424.16796875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989975.0,
"min": 29952.0,
"max": 989975.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989975.0,
"min": 29952.0,
"max": 989975.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5262762904167175,
"min": -0.19615019857883453,
"max": 0.6083706617355347,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 144.72598266601562,
"min": -46.48759841918945,
"max": 170.9521484375,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.0064531611278653145,
"min": -0.0064531611278653145,
"max": 0.4995083808898926,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -1.7746193408966064,
"min": -1.7746193408966064,
"max": 118.38348388671875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.0690342742072207,
"min": 0.06555259116811656,
"max": 0.07352872456734379,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9664798389010898,
"min": 0.5030814734840867,
"max": 1.1028029757435434,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.016259669899721683,
"min": 0.0014329635433919753,
"max": 0.016259669899721683,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22763537859610358,
"min": 0.012896671890527778,
"max": 0.23817671794677153,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.62740460042143e-06,
"min": 7.62740460042143e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010678366440590002,
"min": 0.00010678366440590002,
"max": 0.0036336994887668996,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.1025424357142857,
"min": 0.1025424357142857,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4355940999999999,
"min": 1.3886848,
"max": 2.6112331000000006,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002639893278571429,
"min": 0.0002639893278571429,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003695850590000001,
"min": 0.003695850590000001,
"max": 0.12114218669,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012653774581849575,
"min": 0.011542811058461666,
"max": 0.5906227827072144,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1771528422832489,
"min": 0.16159935295581818,
"max": 4.134359359741211,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 330.4597701149425,
"min": 307.2842105263158,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 28750.0,
"min": 15984.0,
"max": 34544.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.554565504792778,
"min": -1.0000000521540642,
"max": 1.608486297569777,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 135.24719891697168,
"min": -29.152201764285564,
"max": 152.8061982691288,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.554565504792778,
"min": -1.0000000521540642,
"max": 1.608486297569777,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 135.24719891697168,
"min": -29.152201764285564,
"max": 152.8061982691288,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04355037355348575,
"min": 0.04308971196082176,
"max": 11.917101632803679,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.78888249915326,
"min": 3.6592654721753206,
"max": 190.67362612485886,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1698344773",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.0+cu118",
"numpy_version": "1.23.5",
"end_time_seconds": "1698347134"
},
"total": 2360.9528336760004,
"count": 1,
"self": 0.4908952280011363,
"children": {
"run_training.setup": {
"total": 0.07102503000010074,
"count": 1,
"self": 0.07102503000010074
},
"TrainerController.start_learning": {
"total": 2360.3909134179994,
"count": 1,
"self": 1.5639647420252913,
"children": {
"TrainerController._reset_env": {
"total": 3.820204004000061,
"count": 1,
"self": 3.820204004000061
},
"TrainerController.advance": {
"total": 2354.9246315299733,
"count": 63996,
"self": 1.5690374621626688,
"children": {
"env_step": {
"total": 1700.3897112769516,
"count": 63996,
"self": 1554.1335494589378,
"children": {
"SubprocessEnvManager._take_step": {
"total": 145.29347375304133,
"count": 63996,
"self": 4.968374234062821,
"children": {
"TorchPolicy.evaluate": {
"total": 140.3250995189785,
"count": 62566,
"self": 140.3250995189785
}
}
},
"workers": {
"total": 0.9626880649725535,
"count": 63996,
"self": 0.0,
"children": {
"worker_root": {
"total": 2354.9799347829016,
"count": 63996,
"is_parallel": true,
"self": 926.0123415179355,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0019381209999664861,
"count": 1,
"is_parallel": true,
"self": 0.0006381220005096111,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001299998999456875,
"count": 8,
"is_parallel": true,
"self": 0.001299998999456875
}
}
},
"UnityEnvironment.step": {
"total": 0.08003846700012218,
"count": 1,
"is_parallel": true,
"self": 0.0005977759999495902,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004764889999933075,
"count": 1,
"is_parallel": true,
"self": 0.0004764889999933075
},
"communicator.exchange": {
"total": 0.07740171600016765,
"count": 1,
"is_parallel": true,
"self": 0.07740171600016765
},
"steps_from_proto": {
"total": 0.0015624860000116314,
"count": 1,
"is_parallel": true,
"self": 0.00033565900025678275,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012268269997548487,
"count": 8,
"is_parallel": true,
"self": 0.0012268269997548487
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1428.9675932649661,
"count": 63995,
"is_parallel": true,
"self": 36.36175975404262,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 24.954958330031786,
"count": 63995,
"is_parallel": true,
"self": 24.954958330031786
},
"communicator.exchange": {
"total": 1265.221783471943,
"count": 63995,
"is_parallel": true,
"self": 1265.221783471943
},
"steps_from_proto": {
"total": 102.42909170894882,
"count": 63995,
"is_parallel": true,
"self": 21.197555764814297,
"children": {
"_process_rank_one_or_two_observation": {
"total": 81.23153594413452,
"count": 511960,
"is_parallel": true,
"self": 81.23153594413452
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 652.9658827908588,
"count": 63996,
"self": 3.1025521708993438,
"children": {
"process_trajectory": {
"total": 124.56737260595924,
"count": 63996,
"self": 124.40279876195905,
"children": {
"RLTrainer._checkpoint": {
"total": 0.16457384400018782,
"count": 2,
"self": 0.16457384400018782
}
}
},
"_update_policy": {
"total": 525.2959580140002,
"count": 454,
"self": 312.86833557398677,
"children": {
"TorchPPOOptimizer.update": {
"total": 212.42762244001347,
"count": 22752,
"self": 212.42762244001347
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.810000847210176e-07,
"count": 1,
"self": 9.810000847210176e-07
},
"TrainerController._save_models": {
"total": 0.08211216100062302,
"count": 1,
"self": 0.0014332290002130321,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08067893200040999,
"count": 1,
"self": 0.08067893200040999
}
}
}
}
}
}
}