{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5663087368011475,
"min": 0.5663087368011475,
"max": 1.4094384908676147,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 17097.994140625,
"min": 17097.994140625,
"max": 42756.7265625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989995.0,
"min": 29952.0,
"max": 989995.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989995.0,
"min": 29952.0,
"max": 989995.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.5567012429237366,
"min": -0.09065807610750198,
"max": 0.5567012429237366,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 153.09283447265625,
"min": -21.667280197143555,
"max": 153.09283447265625,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0598287470638752,
"min": -0.02408597804605961,
"max": 0.4589160680770874,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 16.452905654907227,
"min": -6.62364387512207,
"max": 108.76310729980469,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06480808763428487,
"min": 0.06480808763428487,
"max": 0.07381701551474157,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9721213145142731,
"min": 0.5153528509126981,
"max": 1.070874873335318,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.013426881680950629,
"min": 0.00014813732874517142,
"max": 0.016288843025318547,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.20140322521425943,
"min": 0.0020739226024323997,
"max": 0.22804380235445965,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.487957504046672e-06,
"min": 7.487957504046672e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011231936256070007,
"min": 0.00011231936256070007,
"max": 0.0036331429889524002,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10249595333333335,
"min": 0.10249595333333335,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5374393000000002,
"min": 1.3886848,
"max": 2.6110476,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025934573800000016,
"min": 0.00025934573800000016,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0038901860700000027,
"min": 0.0038901860700000027,
"max": 0.12112365523999999,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.008682744577527046,
"min": 0.008682744577527046,
"max": 0.40611937642097473,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13024117052555084,
"min": 0.12238536030054092,
"max": 2.8428356647491455,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 365.1927710843373,
"min": 344.09302325581393,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 30311.0,
"min": 15984.0,
"max": 33121.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5383758886930454,
"min": -1.0000000521540642,
"max": 1.59987207172915,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 127.68519876152277,
"min": -31.993201687932014,
"max": 137.5889981687069,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5383758886930454,
"min": -1.0000000521540642,
"max": 1.59987207172915,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 127.68519876152277,
"min": -31.993201687932014,
"max": 137.5889981687069,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.03249630308113936,
"min": 0.032242576374407035,
"max": 8.243971281684935,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.697193155734567,
"min": 2.697193155734567,
"max": 131.90354050695896,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1724051716",
"python_version": "3.10.12 (main, Jul 29 2024, 16:56:48) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1724054085"
},
"total": 2369.435690661,
"count": 1,
"self": 0.5268064800002321,
"children": {
"run_training.setup": {
"total": 0.05440402899967012,
"count": 1,
"self": 0.05440402899967012
},
"TrainerController.start_learning": {
"total": 2368.854480152,
"count": 1,
"self": 1.8050425470491973,
"children": {
"TrainerController._reset_env": {
"total": 2.1544526079997013,
"count": 1,
"self": 2.1544526079997013
},
"TrainerController.advance": {
"total": 2364.7932338179517,
"count": 63732,
"self": 1.893471556905297,
"children": {
"env_step": {
"total": 1693.4196136890996,
"count": 63732,
"self": 1529.7205243622147,
"children": {
"SubprocessEnvManager._take_step": {
"total": 162.5665858549046,
"count": 63732,
"self": 5.666840212965326,
"children": {
"TorchPolicy.evaluate": {
"total": 156.89974564193926,
"count": 62566,
"self": 156.89974564193926
}
}
},
"workers": {
"total": 1.1325034719802716,
"count": 63732,
"self": 0.0,
"children": {
"worker_root": {
"total": 2362.6546621050597,
"count": 63732,
"is_parallel": true,
"self": 979.7565744391163,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.00293819699982123,
"count": 1,
"is_parallel": true,
"self": 0.0007431489993905416,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0021950480004306883,
"count": 8,
"is_parallel": true,
"self": 0.0021950480004306883
}
}
},
"UnityEnvironment.step": {
"total": 0.04830731200036098,
"count": 1,
"is_parallel": true,
"self": 0.0006289989996730583,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004888120001851348,
"count": 1,
"is_parallel": true,
"self": 0.0004888120001851348
},
"communicator.exchange": {
"total": 0.0453961570001411,
"count": 1,
"is_parallel": true,
"self": 0.0453961570001411
},
"steps_from_proto": {
"total": 0.0017933440003616852,
"count": 1,
"is_parallel": true,
"self": 0.0003828690005320823,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.001410474999829603,
"count": 8,
"is_parallel": true,
"self": 0.001410474999829603
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1382.8980876659434,
"count": 63731,
"is_parallel": true,
"self": 38.41560472984702,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 26.215053359072044,
"count": 63731,
"is_parallel": true,
"self": 26.215053359072044
},
"communicator.exchange": {
"total": 1205.871665029019,
"count": 63731,
"is_parallel": true,
"self": 1205.871665029019
},
"steps_from_proto": {
"total": 112.39576454800545,
"count": 63731,
"is_parallel": true,
"self": 24.237985720020333,
"children": {
"_process_rank_one_or_two_observation": {
"total": 88.15777882798511,
"count": 509848,
"is_parallel": true,
"self": 88.15777882798511
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 669.4801485719468,
"count": 63732,
"self": 3.4620545208990734,
"children": {
"process_trajectory": {
"total": 140.27421462904113,
"count": 63732,
"self": 140.06404707804177,
"children": {
"RLTrainer._checkpoint": {
"total": 0.21016755099935835,
"count": 2,
"self": 0.21016755099935835
}
}
},
"_update_policy": {
"total": 525.7438794220066,
"count": 452,
"self": 310.31613117500274,
"children": {
"TorchPPOOptimizer.update": {
"total": 215.42774824700382,
"count": 22818,
"self": 215.42774824700382
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.3310000213095918e-06,
"count": 1,
"self": 1.3310000213095918e-06
},
"TrainerController._save_models": {
"total": 0.1017498479995993,
"count": 1,
"self": 0.0018374889996266575,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09991235899997264,
"count": 1,
"self": 0.09991235899997264
}
}
}
}
}
}
}