{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.7564705610275269,
"min": 1.6862115859985352,
"max": 3.2894952297210693,
"count": 328
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 176209.125,
"min": 169322.625,
"max": 393855.34375,
"count": 328
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 59.826405867970664,
"min": 46.07924528301887,
"max": 953.9230769230769,
"count": 328
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 97876.0,
"min": 96536.0,
"max": 102964.0,
"count": 328
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1602.708888592256,
"min": 1196.5659790628026,
"max": 1629.8939584252985,
"count": 328
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 1311015.8708684654,
"min": 9595.098442082712,
"max": 1707915.410533633,
"count": 328
},
"SoccerTwos.Step.mean": {
"value": 16399992.0,
"min": 49088.0,
"max": 16399992.0,
"count": 328
},
"SoccerTwos.Step.sum": {
"value": 16399992.0,
"min": 49088.0,
"max": 16399992.0,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.0033658314496278763,
"min": -0.08460671454668045,
"max": 0.1390356868505478,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -2.7498843669891357,
"min": -79.2764892578125,
"max": 99.82762145996094,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.0048483810387551785,
"min": -0.08766990154981613,
"max": 0.143933966755867,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -3.961127281188965,
"min": -82.14669799804688,
"max": 103.34458923339844,
"count": 328
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 328
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.02692043992851472,
"min": -0.2808304330253083,
"max": 0.18349247955014114,
"count": 328
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -21.993999421596527,
"min": -151.29859918355942,
"max": 131.74760031700134,
"count": 328
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.02692043992851472,
"min": -0.2808304330253083,
"max": 0.18349247955014114,
"count": 328
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -21.993999421596527,
"min": -151.29859918355942,
"max": 131.74760031700134,
"count": 328
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.013252768508876519,
"min": 0.01297366823127959,
"max": 0.02173578997705287,
"count": 328
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.026505537017753038,
"min": 0.02594733646255918,
"max": 0.0611676517718782,
"count": 328
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10513233728706836,
"min": 0.0004024625793438948,
"max": 0.12297326177358628,
"count": 328
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.21026467457413672,
"min": 0.0008049251586877896,
"max": 0.36854586377739906,
"count": 328
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10670649247864883,
"min": 0.0004262993086740607,
"max": 0.12469334908657603,
"count": 328
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.21341298495729766,
"min": 0.0008525986173481214,
"max": 0.3740800472597281,
"count": 328
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.00019999999999999996,
"min": 0.00019999999999999996,
"max": 0.00019999999999999996,
"count": 328
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003999999999999999,
"min": 0.0003999999999999999,
"max": 0.0005999999999999998,
"count": 328
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 328
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.40000000000000013,
"min": 0.40000000000000013,
"max": 0.6000000000000002,
"count": 328
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 328
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.010000000000000002,
"min": 0.010000000000000002,
"max": 0.015000000000000003,
"count": 328
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 328
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 328
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1758538876",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\travi\\.conda\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --force",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.8.0+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1758649252"
},
"total": 110376.94695080002,
"count": 1,
"self": 1.0160212999908254,
"children": {
"run_training.setup": {
"total": 0.05945110000902787,
"count": 1,
"self": 0.05945110000902787
},
"TrainerController.start_learning": {
"total": 110375.87147840002,
"count": 1,
"self": 56.45513357798336,
"children": {
"TrainerController._reset_env": {
"total": 11.60015240003122,
"count": 83,
"self": 11.60015240003122
},
"TrainerController.advance": {
"total": 110307.50539932202,
"count": 1129479,
"self": 58.16995546070393,
"children": {
"env_step": {
"total": 44376.25314289302,
"count": 1129479,
"self": 31179.975847236754,
"children": {
"SubprocessEnvManager._take_step": {
"total": 13164.036727181985,
"count": 1129479,
"self": 366.3107746823225,
"children": {
"TorchPolicy.evaluate": {
"total": 12797.725952499663,
"count": 2071366,
"self": 12797.725952499663
}
}
},
"workers": {
"total": 32.24056847428437,
"count": 1129478,
"self": 0.0,
"children": {
"worker_root": {
"total": 110338.88217584434,
"count": 1129478,
"is_parallel": true,
"self": 85757.98004859698,
"children": {
"steps_from_proto": {
"total": 0.1040870999568142,
"count": 166,
"is_parallel": true,
"self": 0.02159029961330816,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.08249680034350604,
"count": 664,
"is_parallel": true,
"self": 0.08249680034350604
}
}
},
"UnityEnvironment.step": {
"total": 24580.798040147405,
"count": 1129478,
"is_parallel": true,
"self": 411.8885549954721,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 407.1164513088879,
"count": 1129478,
"is_parallel": true,
"self": 407.1164513088879
},
"communicator.exchange": {
"total": 22508.529119877785,
"count": 1129478,
"is_parallel": true,
"self": 22508.529119877785
},
"steps_from_proto": {
"total": 1253.2639139652601,
"count": 2258956,
"is_parallel": true,
"self": 268.47785401536385,
"children": {
"_process_rank_one_or_two_observation": {
"total": 984.7860599498963,
"count": 9035824,
"is_parallel": true,
"self": 984.7860599498963
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 65873.0823009683,
"count": 1129478,
"self": 335.68821560638025,
"children": {
"process_trajectory": {
"total": 14445.269417263276,
"count": 1129478,
"self": 14435.513299863436,
"children": {
"RLTrainer._checkpoint": {
"total": 9.75611739984015,
"count": 32,
"self": 9.75611739984015
}
}
},
"_update_policy": {
"total": 51092.12466809864,
"count": 793,
"self": 6188.487225891673,
"children": {
"TorchPOCAOptimizer.update": {
"total": 44903.637442206964,
"count": 23790,
"self": 44903.637442206964
}
}
}
}
}
}
},
"trainer_threads": {
"total": 2.300017513334751e-06,
"count": 1,
"self": 2.300017513334751e-06
},
"TrainerController._save_models": {
"total": 0.3107907999656163,
"count": 1,
"self": 0.006381499930284917,
"children": {
"RLTrainer._checkpoint": {
"total": 0.3044093000353314,
"count": 1,
"self": 0.3044093000353314
}
}
}
}
}
}
}