{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.42023995518684387,
"min": 0.417312353849411,
"max": 1.3780100345611572,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 12728.2275390625,
"min": 12359.123046875,
"max": 41803.3125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989916.0,
"min": 29952.0,
"max": 989916.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989916.0,
"min": 29952.0,
"max": 989916.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.494508296251297,
"min": -0.11714667826890945,
"max": 0.494508296251297,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 134.50625610351562,
"min": -28.232349395751953,
"max": 134.50625610351562,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.11991354823112488,
"min": -0.11991354823112488,
"max": 0.37842684984207153,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -32.616485595703125,
"min": -32.616485595703125,
"max": 89.68716430664062,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06571847101717831,
"min": 0.06571847101717831,
"max": 0.07338217595599406,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9200585942404964,
"min": 0.5136752316919584,
"max": 1.0585702346733716,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.025316577455261557,
"min": 9.872927177958787e-05,
"max": 0.025316577455261557,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.3544320843736618,
"min": 0.0011847512613550545,
"max": 0.3544320843736618,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.677033155307145e-06,
"min": 7.677033155307145e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010747846417430002,
"min": 0.00010747846417430002,
"max": 0.0032228863257045995,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.1025589785714286,
"min": 0.1025589785714286,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4358257000000003,
"min": 1.3886848,
"max": 2.3591821000000004,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026564195928571434,
"min": 0.00026564195928571434,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003718987430000001,
"min": 0.003718987430000001,
"max": 0.10744211046,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012148609384894371,
"min": 0.012148609384894371,
"max": 0.577397882938385,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1700805276632309,
"min": 0.1700805276632309,
"max": 4.04178524017334,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 347.358024691358,
"min": 347.358024691358,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 28136.0,
"min": 15984.0,
"max": 33088.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.578538251971757,
"min": -1.0000000521540642,
"max": 1.578538251971757,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 127.86159840971231,
"min": -31.999601677060127,
"max": 127.86159840971231,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.578538251971757,
"min": -1.0000000521540642,
"max": 1.578538251971757,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 127.86159840971231,
"min": -31.999601677060127,
"max": 127.86159840971231,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.043632295292393986,
"min": 0.043632295292393986,
"max": 12.205158620607108,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.5342159186839126,
"min": 3.5342159186839126,
"max": 195.28253792971373,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1707277037",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics --force",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.0+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1707279226"
},
"total": 2188.4617900969997,
"count": 1,
"self": 0.5463724719998027,
"children": {
"run_training.setup": {
"total": 0.0513900639999747,
"count": 1,
"self": 0.0513900639999747
},
"TrainerController.start_learning": {
"total": 2187.864027561,
"count": 1,
"self": 1.364189994001208,
"children": {
"TrainerController._reset_env": {
"total": 3.480654238999932,
"count": 1,
"self": 3.480654238999932
},
"TrainerController.advance": {
"total": 2182.9293550039993,
"count": 63501,
"self": 1.4648795259768121,
"children": {
"env_step": {
"total": 1562.4504229729687,
"count": 63501,
"self": 1426.6441298939899,
"children": {
"SubprocessEnvManager._take_step": {
"total": 134.96270886198624,
"count": 63501,
"self": 4.798783017974301,
"children": {
"TorchPolicy.evaluate": {
"total": 130.16392584401194,
"count": 62557,
"self": 130.16392584401194
}
}
},
"workers": {
"total": 0.8435842169925536,
"count": 63501,
"self": 0.0,
"children": {
"worker_root": {
"total": 2182.709240264026,
"count": 63501,
"is_parallel": true,
"self": 873.9776969380409,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.005504161999965618,
"count": 1,
"is_parallel": true,
"self": 0.003926568999986557,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0015775929999790606,
"count": 8,
"is_parallel": true,
"self": 0.0015775929999790606
}
}
},
"UnityEnvironment.step": {
"total": 0.09534597100002884,
"count": 1,
"is_parallel": true,
"self": 0.00064071400015564,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005290459999969244,
"count": 1,
"is_parallel": true,
"self": 0.0005290459999969244
},
"communicator.exchange": {
"total": 0.09241548599993621,
"count": 1,
"is_parallel": true,
"self": 0.09241548599993621
},
"steps_from_proto": {
"total": 0.0017607249999400665,
"count": 1,
"is_parallel": true,
"self": 0.000363355999866144,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013973690000739225,
"count": 8,
"is_parallel": true,
"self": 0.0013973690000739225
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1308.731543325985,
"count": 63500,
"is_parallel": true,
"self": 35.86159204892397,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 26.22287820202314,
"count": 63500,
"is_parallel": true,
"self": 26.22287820202314
},
"communicator.exchange": {
"total": 1142.1938291900556,
"count": 63500,
"is_parallel": true,
"self": 1142.1938291900556
},
"steps_from_proto": {
"total": 104.45324388498238,
"count": 63500,
"is_parallel": true,
"self": 21.048827884020398,
"children": {
"_process_rank_one_or_two_observation": {
"total": 83.40441600096199,
"count": 508000,
"is_parallel": true,
"self": 83.40441600096199
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 619.0140525050538,
"count": 63501,
"self": 2.6065719700758336,
"children": {
"process_trajectory": {
"total": 124.39129896298061,
"count": 63501,
"self": 123.9897360509807,
"children": {
"RLTrainer._checkpoint": {
"total": 0.40156291199991756,
"count": 2,
"self": 0.40156291199991756
}
}
},
"_update_policy": {
"total": 492.0161815719973,
"count": 438,
"self": 286.9514165750154,
"children": {
"TorchPPOOptimizer.update": {
"total": 205.06476499698192,
"count": 22851,
"self": 205.06476499698192
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.40000063565094e-07,
"count": 1,
"self": 9.40000063565094e-07
},
"TrainerController._save_models": {
"total": 0.08982738399981827,
"count": 1,
"self": 0.0013223609994383878,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08850502300037988,
"count": 1,
"self": 0.08850502300037988
}
}
}
}
}
}
}