{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.3767079710960388,
"min": 0.3603907525539398,
"max": 1.3775832653045654,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 11482.05859375,
"min": 10938.6328125,
"max": 41790.3671875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989993.0,
"min": 29950.0,
"max": 989993.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989993.0,
"min": 29950.0,
"max": 989993.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.6537896394729614,
"min": -0.09571461379528046,
"max": 0.7146927714347839,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 190.25277709960938,
"min": -23.16293716430664,
"max": 202.97274780273438,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.013126983307301998,
"min": -0.013126983307301998,
"max": 0.3322901427745819,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -3.8199522495269775,
"min": -3.8199522495269775,
"max": 80.41421508789062,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06850352072520072,
"min": 0.06501737563978947,
"max": 0.07266703175388732,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.95904929015281,
"min": 0.4887095246935363,
"max": 1.0690438598263141,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015304034145845523,
"min": 0.0012501317510022977,
"max": 0.017409728616940196,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2142564780418373,
"min": 0.015001581012027573,
"max": 0.26114592925410296,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.652154592171427e-06,
"min": 7.652154592171427e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010713016429039998,
"min": 0.00010713016429039998,
"max": 0.0035108144297286,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10255068571428573,
"min": 0.10255068571428573,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4357096000000003,
"min": 1.3886848,
"max": 2.5726288999999998,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026481350285714283,
"min": 0.00026481350285714283,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.00370738904,
"min": 0.00370738904,
"max": 0.11705011286000001,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012179279699921608,
"min": 0.012179279699921608,
"max": 0.48961400985717773,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1705099195241928,
"min": 0.1705099195241928,
"max": 3.427298069000244,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 287.8611111111111,
"min": 263.58490566037733,
"max": 992.7647058823529,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31089.0,
"min": 16877.0,
"max": 32559.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.6380444260797016,
"min": -0.9166774696880772,
"max": 1.7175377281909843,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 176.90879801660776,
"min": -28.41700156033039,
"max": 182.05899918824434,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.6380444260797016,
"min": -0.9166774696880772,
"max": 1.7175377281909843,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 176.90879801660776,
"min": -28.41700156033039,
"max": 182.05899918824434,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.036504639946845906,
"min": 0.03488895420055344,
"max": 9.440492033958435,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 3.9425011142593576,
"min": 3.698229145258665,
"max": 160.4883645772934,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1680185173",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=PyramidsTraining --no-graphics --force",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1680188902"
},
"total": 3729.2332885799997,
"count": 1,
"self": 0.6360896849996607,
"children": {
"run_training.setup": {
"total": 0.22420987099985723,
"count": 1,
"self": 0.22420987099985723
},
"TrainerController.start_learning": {
"total": 3728.372989024,
"count": 1,
"self": 2.7185400809812563,
"children": {
"TrainerController._reset_env": {
"total": 8.097107924000056,
"count": 1,
"self": 8.097107924000056
},
"TrainerController.advance": {
"total": 3717.44264125302,
"count": 64332,
"self": 2.740464870950291,
"children": {
"env_step": {
"total": 2507.0804178620783,
"count": 64332,
"self": 2350.538354326012,
"children": {
"SubprocessEnvManager._take_step": {
"total": 154.88101418704355,
"count": 64332,
"self": 7.631342795039245,
"children": {
"TorchPolicy.evaluate": {
"total": 147.2496713920043,
"count": 62563,
"self": 147.2496713920043
}
}
},
"workers": {
"total": 1.66104934902296,
"count": 64332,
"self": 0.0,
"children": {
"worker_root": {
"total": 3720.5877042390007,
"count": 64332,
"is_parallel": true,
"self": 1569.5671120189477,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.005300024000007397,
"count": 1,
"is_parallel": true,
"self": 0.0018187630000738864,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.003481260999933511,
"count": 8,
"is_parallel": true,
"self": 0.003481260999933511
}
}
},
"UnityEnvironment.step": {
"total": 0.06608677999997781,
"count": 1,
"is_parallel": true,
"self": 0.0006233780002276035,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0005760399999417132,
"count": 1,
"is_parallel": true,
"self": 0.0005760399999417132
},
"communicator.exchange": {
"total": 0.06274657399990247,
"count": 1,
"is_parallel": true,
"self": 0.06274657399990247
},
"steps_from_proto": {
"total": 0.002140787999906024,
"count": 1,
"is_parallel": true,
"self": 0.00047031500002958637,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0016704729998764378,
"count": 8,
"is_parallel": true,
"self": 0.0016704729998764378
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 2151.020592220053,
"count": 64331,
"is_parallel": true,
"self": 45.72248392217534,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 27.825075067922626,
"count": 64331,
"is_parallel": true,
"self": 27.825075067922626
},
"communicator.exchange": {
"total": 1942.5127014570287,
"count": 64331,
"is_parallel": true,
"self": 1942.5127014570287
},
"steps_from_proto": {
"total": 134.96033177292634,
"count": 64331,
"is_parallel": true,
"self": 30.995202369732397,
"children": {
"_process_rank_one_or_two_observation": {
"total": 103.96512940319394,
"count": 514648,
"is_parallel": true,
"self": 103.96512940319394
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 1207.6217585199915,
"count": 64332,
"self": 5.360714091009868,
"children": {
"process_trajectory": {
"total": 177.20507640198684,
"count": 64332,
"self": 176.9228533069877,
"children": {
"RLTrainer._checkpoint": {
"total": 0.28222309499915355,
"count": 2,
"self": 0.28222309499915355
}
}
},
"_update_policy": {
"total": 1025.0559680269948,
"count": 456,
"self": 420.78344789397875,
"children": {
"TorchPPOOptimizer.update": {
"total": 604.2725201330161,
"count": 22791,
"self": 604.2725201330161
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.050999344442971e-06,
"count": 1,
"self": 1.050999344442971e-06
},
"TrainerController._save_models": {
"total": 0.11469871499957662,
"count": 1,
"self": 0.0035911059994759853,
"children": {
"RLTrainer._checkpoint": {
"total": 0.11110760900010064,
"count": 1,
"self": 0.11110760900010064
}
}
}
}
}
}
}