{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.3820152282714844,
"min": 0.3820152282714844,
"max": 1.4666264057159424,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 11368.7734375,
"min": 11368.7734375,
"max": 44491.578125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989975.0,
"min": 29952.0,
"max": 989975.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989975.0,
"min": 29952.0,
"max": 989975.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.25611087679862976,
"min": -0.12301318347454071,
"max": 0.34956738352775574,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 65.82049560546875,
"min": -29.523164749145508,
"max": 91.58665466308594,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.001834269380196929,
"min": -0.023266626521945,
"max": 0.33336490392684937,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -0.4714072346687317,
"min": -5.770123481750488,
"max": 79.00748443603516,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07096227088810078,
"min": 0.06553525356682216,
"max": 0.07096227088810078,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9934717924334109,
"min": 0.4903418627589668,
"max": 1.0208580700370173,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.012679628994755191,
"min": 0.0001545016771361885,
"max": 0.012679628994755191,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.17751480592657268,
"min": 0.0020085218027704505,
"max": 0.17751480592657268,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.748218845864285e-06,
"min": 7.748218845864285e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010847506384209999,
"min": 0.00010847506384209999,
"max": 0.0035070869309710992,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10258270714285715,
"min": 0.10258270714285715,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4361579000000002,
"min": 1.3886848,
"max": 2.5690289,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002680124435714286,
"min": 0.0002680124435714286,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037521742100000005,
"min": 0.0037521742100000005,
"max": 0.11692598710999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009253355674445629,
"min": 0.009253355674445629,
"max": 0.39021825790405273,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.12954698503017426,
"min": 0.12954698503017426,
"max": 2.731527805328369,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 512.4150943396227,
"min": 512.4150943396227,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 27158.0,
"min": 15984.0,
"max": 33443.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.221056581808711,
"min": -1.0000000521540642,
"max": 1.2985962571369276,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 64.71599883586168,
"min": -31.996801659464836,
"max": 70.1241978853941,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.221056581808711,
"min": -1.0000000521540642,
"max": 1.2985962571369276,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 64.71599883586168,
"min": -31.996801659464836,
"max": 70.1241978853941,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.051189134826549805,
"min": 0.051189134826549805,
"max": 7.553069240413606,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.7130241458071396,
"min": 2.7130241458071396,
"max": 120.8491078466177,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1741166240",
"python_version": "3.10.12 (main, Jul 5 2023, 18:54:27) [GCC 11.2.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1741168257"
},
"total": 2017.88542349,
"count": 1,
"self": 0.7554275860002235,
"children": {
"run_training.setup": {
"total": 0.02074958899993362,
"count": 1,
"self": 0.02074958899993362
},
"TrainerController.start_learning": {
"total": 2017.1092463149998,
"count": 1,
"self": 1.2199819169354669,
"children": {
"TrainerController._reset_env": {
"total": 2.9051937799999905,
"count": 1,
"self": 2.9051937799999905
},
"TrainerController.advance": {
"total": 2012.8544029680638,
"count": 63438,
"self": 1.2583063030915582,
"children": {
"env_step": {
"total": 1340.4768093739594,
"count": 63438,
"self": 1194.8666227060462,
"children": {
"SubprocessEnvManager._take_step": {
"total": 144.87512884797934,
"count": 63438,
"self": 4.4101793239246945,
"children": {
"TorchPolicy.evaluate": {
"total": 140.46494952405465,
"count": 62550,
"self": 140.46494952405465
}
}
},
"workers": {
"total": 0.7350578199338997,
"count": 63438,
"self": 0.0,
"children": {
"worker_root": {
"total": 2012.5331406260839,
"count": 63438,
"is_parallel": true,
"self": 920.3807351721093,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0025614420001147664,
"count": 1,
"is_parallel": true,
"self": 0.0008145660003719968,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0017468759997427696,
"count": 8,
"is_parallel": true,
"self": 0.0017468759997427696
}
}
},
"UnityEnvironment.step": {
"total": 0.04628038700002435,
"count": 1,
"is_parallel": true,
"self": 0.0005093489999126177,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00045241199995871284,
"count": 1,
"is_parallel": true,
"self": 0.00045241199995871284
},
"communicator.exchange": {
"total": 0.0436588219999976,
"count": 1,
"is_parallel": true,
"self": 0.0436588219999976
},
"steps_from_proto": {
"total": 0.0016598040001554182,
"count": 1,
"is_parallel": true,
"self": 0.00034120800000891904,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013185960001464991,
"count": 8,
"is_parallel": true,
"self": 0.0013185960001464991
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1092.1524054539746,
"count": 63437,
"is_parallel": true,
"self": 30.392961995997666,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.418045829942002,
"count": 63437,
"is_parallel": true,
"self": 22.418045829942002
},
"communicator.exchange": {
"total": 948.9120338280391,
"count": 63437,
"is_parallel": true,
"self": 948.9120338280391
},
"steps_from_proto": {
"total": 90.42936379999583,
"count": 63437,
"is_parallel": true,
"self": 17.60526451485157,
"children": {
"_process_rank_one_or_two_observation": {
"total": 72.82409928514426,
"count": 507496,
"is_parallel": true,
"self": 72.82409928514426
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 671.1192872910128,
"count": 63438,
"self": 2.348435439039804,
"children": {
"process_trajectory": {
"total": 122.03270620797684,
"count": 63438,
"self": 121.78790839897692,
"children": {
"RLTrainer._checkpoint": {
"total": 0.24479780899991965,
"count": 2,
"self": 0.24479780899991965
}
}
},
"_update_policy": {
"total": 546.7381456439962,
"count": 447,
"self": 299.62561800302865,
"children": {
"TorchPPOOptimizer.update": {
"total": 247.11252764096753,
"count": 22764,
"self": 247.11252764096753
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.1110005289083347e-06,
"count": 1,
"self": 1.1110005289083347e-06
},
"TrainerController._save_models": {
"total": 0.12966653900002711,
"count": 1,
"self": 0.002103689000250597,
"children": {
"RLTrainer._checkpoint": {
"total": 0.12756284999977652,
"count": 1,
"self": 0.12756284999977652
}
}
}
}
}
}
}