{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.5344411134719849,
"min": 0.5077705979347229,
"max": 1.5152571201324463,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 15887.865234375,
"min": 15265.615234375,
"max": 45966.83984375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989929.0,
"min": 29952.0,
"max": 989929.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989929.0,
"min": 29952.0,
"max": 989929.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.40177106857299805,
"min": -0.10633585602045059,
"max": 0.42841634154319763,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 105.26402282714844,
"min": -25.626941680908203,
"max": 119.09974670410156,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.1961318701505661,
"min": -0.06456505507230759,
"max": 0.21784044802188873,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 51.38655090332031,
"min": -16.851478576660156,
"max": 52.281707763671875,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06734459724201278,
"min": 0.06572891073440024,
"max": 0.07216782623720577,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9428243613881789,
"min": 0.48531011296851,
"max": 1.0772933267769131,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.040791831452122965,
"min": 0.00032096227745021666,
"max": 0.040791831452122965,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.5710856403297215,
"min": 0.0025676982196017333,
"max": 0.5710856403297215,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.665183159257143e-06,
"min": 7.665183159257143e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.0001073125642296,
"min": 0.0001073125642296,
"max": 0.0032539119153627994,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10255502857142858,
"min": 0.10255502857142858,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4357704,
"min": 1.3691136000000002,
"max": 2.4846372000000008,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.0002652473542857143,
"min": 0.0002652473542857143,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0037134629600000003,
"min": 0.0037134629600000003,
"max": 0.10849525627999998,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.00995674915611744,
"min": 0.00995674915611744,
"max": 0.33786579966545105,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13939449191093445,
"min": 0.13939449191093445,
"max": 2.365060567855835,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 451.0923076923077,
"min": 392.91358024691357,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 29321.0,
"min": 15984.0,
"max": 33444.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.3950124765979126,
"min": -1.0000000521540642,
"max": 1.5329851681067619,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 89.2807985022664,
"min": -32.000001668930054,
"max": 124.17179861664772,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.3950124765979126,
"min": -1.0000000521540642,
"max": 1.5329851681067619,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 89.2807985022664,
"min": -32.000001668930054,
"max": 124.17179861664772,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.04595074188432591,
"min": 0.042200578046627746,
"max": 6.687923964112997,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.940847480596858,
"min": 2.940847480596858,
"max": 107.00678342580795,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1699297424",
"python_version": "3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.1.0+cu118",
"numpy_version": "1.23.5",
"end_time_seconds": "1699299808"
},
"total": 2383.627472986,
"count": 1,
"self": 0.5266920769995522,
"children": {
"run_training.setup": {
"total": 0.04253116199993201,
"count": 1,
"self": 0.04253116199993201
},
"TrainerController.start_learning": {
"total": 2383.0582497470004,
"count": 1,
"self": 1.605543590934758,
"children": {
"TrainerController._reset_env": {
"total": 3.7947257870000612,
"count": 1,
"self": 3.7947257870000612
},
"TrainerController.advance": {
"total": 2377.5792388330656,
"count": 63577,
"self": 1.7094205041207715,
"children": {
"env_step": {
"total": 1719.85688479795,
"count": 63577,
"self": 1569.7078749249029,
"children": {
"SubprocessEnvManager._take_step": {
"total": 149.15135678403226,
"count": 63577,
"self": 5.193222593070232,
"children": {
"TorchPolicy.evaluate": {
"total": 143.95813419096203,
"count": 62551,
"self": 143.95813419096203
}
}
},
"workers": {
"total": 0.9976530890148751,
"count": 63577,
"self": 0.0,
"children": {
"worker_root": {
"total": 2377.6518008679795,
"count": 63577,
"is_parallel": true,
"self": 936.6422303319664,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0018191399999523128,
"count": 1,
"is_parallel": true,
"self": 0.0005852790002336405,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012338609997186722,
"count": 8,
"is_parallel": true,
"self": 0.0012338609997186722
}
}
},
"UnityEnvironment.step": {
"total": 0.10300508699992861,
"count": 1,
"is_parallel": true,
"self": 0.0005590380001194717,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00042520099987086724,
"count": 1,
"is_parallel": true,
"self": 0.00042520099987086724
},
"communicator.exchange": {
"total": 0.10046036099993216,
"count": 1,
"is_parallel": true,
"self": 0.10046036099993216
},
"steps_from_proto": {
"total": 0.0015604870000061055,
"count": 1,
"is_parallel": true,
"self": 0.0003209439998954622,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012395430001106433,
"count": 8,
"is_parallel": true,
"self": 0.0012395430001106433
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1441.0095705360131,
"count": 63576,
"is_parallel": true,
"self": 37.21391078217994,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.082663968901443,
"count": 63576,
"is_parallel": true,
"self": 25.082663968901443
},
"communicator.exchange": {
"total": 1275.2703111149704,
"count": 63576,
"is_parallel": true,
"self": 1275.2703111149704
},
"steps_from_proto": {
"total": 103.44268466996141,
"count": 63576,
"is_parallel": true,
"self": 21.324029914028188,
"children": {
"_process_rank_one_or_two_observation": {
"total": 82.11865475593322,
"count": 508608,
"is_parallel": true,
"self": 82.11865475593322
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 656.0129335309948,
"count": 63577,
"self": 3.135818895026887,
"children": {
"process_trajectory": {
"total": 125.2033299679756,
"count": 63577,
"self": 125.03661363697574,
"children": {
"RLTrainer._checkpoint": {
"total": 0.16671633099986138,
"count": 2,
"self": 0.16671633099986138
}
}
},
"_update_policy": {
"total": 527.6737846679923,
"count": 439,
"self": 313.5181837800001,
"children": {
"TorchPPOOptimizer.update": {
"total": 214.15560088799225,
"count": 22839,
"self": 214.15560088799225
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.959999260900076e-07,
"count": 1,
"self": 9.959999260900076e-07
},
"TrainerController._save_models": {
"total": 0.0787405400001262,
"count": 1,
"self": 0.001396270000441291,
"children": {
"RLTrainer._checkpoint": {
"total": 0.07734426999968491,
"count": 1,
"self": 0.07734426999968491
}
}
}
}
}
}
}