{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.41704049706459045,
"min": 0.41002610325813293,
"max": 1.4648184776306152,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12531.232421875,
"min": 12097.41015625,
"max": 44436.734375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989903.0,
"min": 29952.0,
"max": 989903.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989903.0,
"min": 29952.0,
"max": 989903.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.596138060092926,
"min": -0.11757717281579971,
"max": 0.6042474508285522,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 168.7070770263672,
"min": -28.218521118164062,
"max": 169.1892852783203,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.07390497624874115,
"min": -0.006074411794543266,
"max": 0.35640329122543335,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 20.91510772705078,
"min": -1.6461656093597412,
"max": 84.46758270263672,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06840651527108137,
"min": 0.06483729437154363,
"max": 0.07449178403753433,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9576912137951392,
"min": 0.4917558679630145,
"max": 1.0574620285364755,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.017522719688367074,
"min": 0.00015051816314984014,
"max": 0.018688217902599107,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.24531807563713906,
"min": 0.0016556997946482416,
"max": 0.2651820292715759,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.361711831842859e-06,
"min": 7.361711831842859e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010306396564580003,
"min": 0.00010306396564580003,
"max": 0.0035090015303329,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10245387142857144,
"min": 0.10245387142857144,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4343542000000002,
"min": 1.3691136000000002,
"max": 2.5696671,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025514175571428576,
"min": 0.00025514175571428576,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035719845800000006,
"min": 0.0035719845800000006,
"max": 0.11698974329000002,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.013458915054798126,
"min": 0.013458915054798126,
"max": 0.4578872323036194,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.18842481076717377,
"min": 0.18842481076717377,
"max": 3.2052106857299805,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 314.4791666666667,
"min": 300.1979166666667,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30190.0,
"min": 15984.0,
"max": 32093.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6438499811726313,
"min": -1.0000000521540642,
"max": 1.6778679422093827,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 157.8095981925726,
"min": -32.000001668930054,
"max": 172.8203980475664,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6438499811726313,
"min": -1.0000000521540642,
"max": 1.6778679422093827,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 157.8095981925726,
"min": -32.000001668930054,
"max": 172.8203980475664,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04385424695798671,
"min": 0.04269222809748896,
"max": 8.888745879754424,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.210007707966724,
"min": 4.210007707966724,
"max": 142.21993407607079,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1715672731",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1715674976"
},
"total": 2244.839601249,
"count": 1,
"self": 0.7459130830002323,
"children": {
"run_training.setup": {
"total": 0.04923896800005423,
"count": 1,
"self": 0.04923896800005423
},
"TrainerController.start_learning": {
"total": 2244.044449198,
"count": 1,
"self": 1.3705190190235044,
"children": {
"TrainerController._reset_env": {
"total": 2.178777944000103,
"count": 1,
"self": 2.178777944000103
},
"TrainerController.advance": {
"total": 2240.3666258679764,
"count": 63955,
"self": 1.4858646750253683,
"children": {
"env_step": {
"total": 1604.0709354750122,
"count": 63955,
"self": 1472.8144537790472,
"children": {
"SubprocessEnvManager._take_step": {
"total": 130.40933459698135,
"count": 63955,
"self": 4.6102120589632705,
"children": {
"TorchPolicy.evaluate": {
"total": 125.79912253801808,
"count": 62579,
"self": 125.79912253801808
}
}
},
"workers": {
"total": 0.8471470989836689,
"count": 63955,
"self": 0.0,
"children": {
"worker_root": {
"total": 2239.021191920028,
"count": 63955,
"is_parallel": true,
"self": 891.2746604730173,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.00204140699997879,
"count": 1,
"is_parallel": true,
"self": 0.0005854059998000594,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014560010001787305,
"count": 8,
"is_parallel": true,
"self": 0.0014560010001787305
}
}
},
"UnityEnvironment.step": {
"total": 0.04848448500001723,
"count": 1,
"is_parallel": true,
"self": 0.0006518759998925816,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004389240000364225,
"count": 1,
"is_parallel": true,
"self": 0.0004389240000364225
},
"communicator.exchange": {
"total": 0.04569351400004962,
"count": 1,
"is_parallel": true,
"self": 0.04569351400004962
},
"steps_from_proto": {
"total": 0.0017001710000386083,
"count": 1,
"is_parallel": true,
"self": 0.00034697000000960543,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001353201000029003,
"count": 8,
"is_parallel": true,
"self": 0.001353201000029003
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1347.7465314470105,
"count": 63954,
"is_parallel": true,
"self": 34.08612006289445,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.81108018600753,
"count": 63954,
"is_parallel": true,
"self": 23.81108018600753
},
"communicator.exchange": {
"total": 1192.725198912039,
"count": 63954,
"is_parallel": true,
"self": 1192.725198912039
},
"steps_from_proto": {
"total": 97.12413228606943,
"count": 63954,
"is_parallel": true,
"self": 19.61563785514045,
"children": {
"_process_rank_one_or_two_observation": {
"total": 77.50849443092898,
"count": 511632,
"is_parallel": true,
"self": 77.50849443092898
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 634.8098257179388,
"count": 63955,
"self": 2.659910132915911,
"children": {
"process_trajectory": {
"total": 127.05026433102717,
"count": 63955,
"self": 126.60175092002714,
"children": {
"RLTrainer._checkpoint": {
"total": 0.44851341100002173,
"count": 2,
"self": 0.44851341100002173
}
}
},
"_update_policy": {
"total": 505.0996512539957,
"count": 448,
"self": 298.9264912089643,
"children": {
"TorchPPOOptimizer.update": {
"total": 206.1731600450314,
"count": 22797,
"self": 206.1731600450314
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.4349998309626244e-06,
"count": 1,
"self": 1.4349998309626244e-06
},
"TrainerController._save_models": {
"total": 0.12852493200034587,
"count": 1,
"self": 0.0021049900005891686,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1264199419997567,
"count": 1,
"self": 0.1264199419997567
}
}
}
}
}
}
}