{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5242890119552612,
"min": 0.5198160409927368,
"max": 1.5085222721099854,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 15812.556640625,
"min": 15602.798828125,
"max": 45762.53125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989952.0,
"min": 29952.0,
"max": 989952.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989952.0,
"min": 29952.0,
"max": 989952.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.414237380027771,
"min": -0.09166667610406876,
"max": 0.5567175149917603,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 111.01561737060547,
"min": -22.000001907348633,
"max": 155.32418823242188,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.002206940669566393,
"min": -0.008212718181312084,
"max": 0.36719396710395813,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 0.591460108757019,
"min": -2.159944772720337,
"max": 87.02497100830078,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07065573055435727,
"min": 0.06359740228591752,
"max": 0.07496003907061946,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9891802277610017,
"min": 0.5023638336673706,
"max": 1.0646917481279554,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01340566594375386,
"min": 0.0010241960521957378,
"max": 0.015459363466878803,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.18767932321255404,
"min": 0.012290352626348854,
"max": 0.21643108853630325,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.603104608521431e-06,
"min": 7.603104608521431e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010644346451930003,
"min": 0.00010644346451930003,
"max": 0.0037582933472355994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10253433571428573,
"min": 0.10253433571428573,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4354807000000003,
"min": 1.3886848,
"max": 2.6527644000000006,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002631801378571429,
"min": 0.0002631801378571429,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0036845219300000007,
"min": 0.0036845219300000007,
"max": 0.12529116356,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009434044361114502,
"min": 0.009257475845515728,
"max": 0.3544917702674866,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13207662105560303,
"min": 0.12960466742515564,
"max": 2.481442451477051,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 389.9577464788732,
"min": 334.55172413793105,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 27687.0,
"min": 15984.0,
"max": 34571.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.4348555342811677,
"min": -1.0000000521540642,
"max": 1.5504666532593212,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 103.30959846824408,
"min": -28.433401599526405,
"max": 134.89059883356094,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.4348555342811677,
"min": -1.0000000521540642,
"max": 1.5504666532593212,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 103.30959846824408,
"min": -28.433401599526405,
"max": 134.89059883356094,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03827408184118516,
"min": 0.0359413270055996,
"max": 7.103593226522207,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.755733892565331,
"min": 2.755733892565331,
"max": 113.65749162435532,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1709502282",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1709504632"
},
"total": 2350.170255792,
"count": 1,
"self": 0.6931259999992108,
"children": {
"run_training.setup": {
"total": 0.05467059600050561,
"count": 1,
"self": 0.05467059600050561
},
"TrainerController.start_learning": {
"total": 2349.422459196,
"count": 1,
"self": 1.5782962950297588,
"children": {
"TrainerController._reset_env": {
"total": 2.2112902369999574,
"count": 1,
"self": 2.2112902369999574
},
"TrainerController.advance": {
"total": 2345.494155590971,
"count": 63774,
"self": 1.6340729130024556,
"children": {
"env_step": {
"total": 1698.1845696958835,
"count": 63774,
"self": 1554.5454342908997,
"children": {
"SubprocessEnvManager._take_step": {
"total": 142.6651070959133,
"count": 63774,
"self": 5.183118036145061,
"children": {
"TorchPolicy.evaluate": {
"total": 137.48198905976824,
"count": 62546,
"self": 137.48198905976824
}
}
},
"workers": {
"total": 0.9740283090704906,
"count": 63774,
"self": 0.0,
"children": {
"worker_root": {
"total": 2343.4589487439507,
"count": 63774,
"is_parallel": true,
"self": 919.7179341088777,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.001937319999342435,
"count": 1,
"is_parallel": true,
"self": 0.0005849919998581754,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013523279994842596,
"count": 8,
"is_parallel": true,
"self": 0.0013523279994842596
}
}
},
"UnityEnvironment.step": {
"total": 0.05412215100022877,
"count": 1,
"is_parallel": true,
"self": 0.0007157609998102998,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005623359993478516,
"count": 1,
"is_parallel": true,
"self": 0.0005623359993478516
},
"communicator.exchange": {
"total": 0.05073796600026981,
"count": 1,
"is_parallel": true,
"self": 0.05073796600026981
},
"steps_from_proto": {
"total": 0.00210608800080081,
"count": 1,
"is_parallel": true,
"self": 0.0005625260000670096,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015435620007338002,
"count": 8,
"is_parallel": true,
"self": 0.0015435620007338002
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1423.741014635073,
"count": 63773,
"is_parallel": true,
"self": 37.81483607990867,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 27.913193535046958,
"count": 63773,
"is_parallel": true,
"self": 27.913193535046958
},
"communicator.exchange": {
"total": 1245.2838433260513,
"count": 63773,
"is_parallel": true,
"self": 1245.2838433260513
},
"steps_from_proto": {
"total": 112.72914169406613,
"count": 63773,
"is_parallel": true,
"self": 22.985449912347576,
"children": {
"_process_rank_one_or_two_observation": {
"total": 89.74369178171855,
"count": 510184,
"is_parallel": true,
"self": 89.74369178171855
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 645.6755129820849,
"count": 63774,
"self": 3.204582051092075,
"children": {
"process_trajectory": {
"total": 134.60687019900433,
"count": 63774,
"self": 134.24096917700444,
"children": {
"RLTrainer._checkpoint": {
"total": 0.36590102199988905,
"count": 2,
"self": 0.36590102199988905
}
}
},
"_update_policy": {
"total": 507.8640607319885,
"count": 456,
"self": 300.7271611360802,
"children": {
"TorchPPOOptimizer.update": {
"total": 207.1368995959083,
"count": 22785,
"self": 207.1368995959083
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.2040000001434237e-06,
"count": 1,
"self": 1.2040000001434237e-06
},
"TrainerController._save_models": {
"total": 0.13871586899949762,
"count": 1,
"self": 0.0020124489992667804,
"children": {
"RLTrainer._checkpoint": {
"total": 0.13670342000023084,
"count": 1,
"self": 0.13670342000023084
}
}
}
}
}
}
}