{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.6934102773666382,
"min": 0.6745714545249939,
"max": 1.4631102085113525,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 20702.45703125,
"min": 20150.798828125,
"max": 44384.91015625,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989991.0,
"min": 29952.0,
"max": 989991.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989991.0,
"min": 29952.0,
"max": 989991.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.20361100137233734,
"min": -0.13542723655700684,
"max": 0.20361100137233734,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 51.920806884765625,
"min": -32.096256256103516,
"max": 51.920806884765625,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.19911488890647888,
"min": 0.01804698072373867,
"max": 0.4217011630535126,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 50.774295806884766,
"min": 4.349322319030762,
"max": 101.20828247070312,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06449348936981622,
"min": 0.06413171239945964,
"max": 0.07421490162188453,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.902908851177427,
"min": 0.48523599023535313,
"max": 1.0298669329974526,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.014976870682436508,
"min": 7.232142720461897e-05,
"max": 0.014976870682436508,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2096761895541111,
"min": 0.0010124999808646656,
"max": 0.2096761895541111,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.597340324728573e-06,
"min": 7.597340324728573e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010636276454620003,
"min": 0.00010636276454620003,
"max": 0.0032557049147651,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10253241428571429,
"min": 0.10253241428571429,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4354538000000001,
"min": 1.3691136000000002,
"max": 2.3852349,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002629881871428573,
"min": 0.0002629881871428573,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003681834620000002,
"min": 0.003681834620000002,
"max": 0.10854496651000001,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.016609277576208115,
"min": 0.016609277576208115,
"max": 0.5913077592849731,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.2325298935174942,
"min": 0.2325298935174942,
"max": 4.139154434204102,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 672.4318181818181,
"min": 672.4318181818181,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29587.0,
"min": 15984.0,
"max": 33436.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.7364863250404596,
"min": -1.0000000521540642,
"max": 0.7364863250404596,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 32.405398301780224,
"min": -32.000001668930054,
"max": 32.405398301780224,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.7364863250404596,
"min": -1.0000000521540642,
"max": 0.7364863250404596,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 32.405398301780224,
"min": -32.000001668930054,
"max": 32.405398301780224,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.11711653845056637,
"min": 0.11711653845056637,
"max": 12.477840095758438,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 5.1531276918249205,
"min": 5.033200995065272,
"max": 199.645441532135,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1712904076",
"python_version": "3.10.12 (main, Nov 20 2023, 15:14:05) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.2.1+cu121",
"numpy_version": "1.23.5",
"end_time_seconds": "1712906031"
},
"total": 1955.1136022010003,
"count": 1,
"self": 0.4895655190002799,
"children": {
"run_training.setup": {
"total": 0.04855234800015751,
"count": 1,
"self": 0.04855234800015751
},
"TrainerController.start_learning": {
"total": 1954.5754843339998,
"count": 1,
"self": 1.23401496098586,
"children": {
"TrainerController._reset_env": {
"total": 2.230292125999995,
"count": 1,
"self": 2.230292125999995
},
"TrainerController.advance": {
"total": 1951.0244794650139,
"count": 63227,
"self": 1.353996692026385,
"children": {
"env_step": {
"total": 1334.5167301789695,
"count": 63227,
"self": 1210.0324815859096,
"children": {
"SubprocessEnvManager._take_step": {
"total": 123.71662630607102,
"count": 63227,
"self": 4.426764573089031,
"children": {
"TorchPolicy.evaluate": {
"total": 119.28986173298199,
"count": 62569,
"self": 119.28986173298199
}
}
},
"workers": {
"total": 0.7676222869888534,
"count": 63227,
"self": 0.0,
"children": {
"worker_root": {
"total": 1949.8848670379343,
"count": 63227,
"is_parallel": true,
"self": 851.026072879976,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0020467740000640333,
"count": 1,
"is_parallel": true,
"self": 0.000612486999898465,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0014342870001655683,
"count": 8,
"is_parallel": true,
"self": 0.0014342870001655683
}
}
},
"UnityEnvironment.step": {
"total": 0.10841299100002288,
"count": 1,
"is_parallel": true,
"self": 0.0006472490001669939,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004771129999880941,
"count": 1,
"is_parallel": true,
"self": 0.0004771129999880941
},
"communicator.exchange": {
"total": 0.10543688400002793,
"count": 1,
"is_parallel": true,
"self": 0.10543688400002793
},
"steps_from_proto": {
"total": 0.001851744999839866,
"count": 1,
"is_parallel": true,
"self": 0.00039367900035358616,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.00145806599948628,
"count": 8,
"is_parallel": true,
"self": 0.00145806599948628
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1098.8587941579583,
"count": 63226,
"is_parallel": true,
"self": 33.45813928491816,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 23.097775213997238,
"count": 63226,
"is_parallel": true,
"self": 23.097775213997238
},
"communicator.exchange": {
"total": 946.7239358080117,
"count": 63226,
"is_parallel": true,
"self": 946.7239358080117
},
"steps_from_proto": {
"total": 95.57894385103123,
"count": 63226,
"is_parallel": true,
"self": 18.807738437941907,
"children": {
"_process_rank_one_or_two_observation": {
"total": 76.77120541308932,
"count": 505808,
"is_parallel": true,
"self": 76.77120541308932
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 615.153752594018,
"count": 63227,
"self": 2.2930042369828243,
"children": {
"process_trajectory": {
"total": 123.12713578103353,
"count": 63227,
"self": 122.92651839003315,
"children": {
"RLTrainer._checkpoint": {
"total": 0.20061739100037812,
"count": 2,
"self": 0.20061739100037812
}
}
},
"_update_policy": {
"total": 489.73361257600163,
"count": 437,
"self": 288.32583582399434,
"children": {
"TorchPPOOptimizer.update": {
"total": 201.4077767520073,
"count": 22830,
"self": 201.4077767520073
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0379999366705306e-06,
"count": 1,
"self": 1.0379999366705306e-06
},
"TrainerController._save_models": {
"total": 0.08669674400016447,
"count": 1,
"self": 0.0013907240004300547,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08530601999973442,
"count": 1,
"self": 0.08530601999973442
}
}
}
}
}
}
}