{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.45337778329849243,
"min": 0.45337778329849243,
"max": 1.4459898471832275,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 13579.5712890625,
"min": 13579.5712890625,
"max": 43865.546875,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989988.0,
"min": 29952.0,
"max": 989988.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989988.0,
"min": 29952.0,
"max": 989988.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.4047192335128784,
"min": -0.10769429057836533,
"max": 0.42652279138565063,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 108.46475219726562,
"min": -26.16971206665039,
"max": 114.30810546875,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": -0.004918779246509075,
"min": -0.011400463059544563,
"max": 0.40535178780555725,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": -1.3182328939437866,
"min": -3.0325231552124023,
"max": 96.06837463378906,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07056551573785126,
"min": 0.06444140027985781,
"max": 0.07428677535722678,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 1.0584827360677689,
"min": 0.5088646117833325,
"max": 1.082707608966659,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.01582521658581552,
"min": 0.00015461617455306097,
"max": 0.01582521658581552,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.2373782487872328,
"min": 0.0020100102691897924,
"max": 0.2373782487872328,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.560977479706667e-06,
"min": 7.560977479706667e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00011341466219560001,
"min": 0.00011341466219560001,
"max": 0.0034919005360332,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10252029333333333,
"min": 0.10252029333333333,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.5378044,
"min": 1.3886848,
"max": 2.4639668,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00026177730400000007,
"min": 0.00026177730400000007,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.003926659560000001,
"min": 0.003926659560000001,
"max": 0.11641028332,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.009095985442399979,
"min": 0.008745396509766579,
"max": 0.3661887049674988,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.13643978536128998,
"min": 0.1224355548620224,
"max": 2.5633208751678467,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 413.05970149253733,
"min": 413.05970149253733,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 27675.0,
"min": 15984.0,
"max": 33903.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.4366636102398236,
"min": -1.0000000521540642,
"max": 1.4366636102398236,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 94.81979827582836,
"min": -31.995201647281647,
"max": 101.60499835014343,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.4366636102398236,
"min": -1.0000000521540642,
"max": 1.4366636102398236,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 94.81979827582836,
"min": -31.995201647281647,
"max": 101.60499835014343,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.038740578888047654,
"min": 0.03868145177289585,
"max": 7.478278869763017,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 2.5568782066111453,
"min": 2.5568782066111453,
"max": 119.65246191620827,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1741203537",
"python_version": "3.10.12 (main, Sep 11 2024, 15:47:36) [GCC 11.4.0]",
"command_line_arguments": "/home/kraz/Documents/DeepRLCourse/ml-agents/VENV/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.6.0+cu124",
"numpy_version": "1.23.5",
"end_time_seconds": "1741204855"
},
"total": 1317.968473361,
"count": 1,
"self": 0.2690927040002862,
"children": {
"run_training.setup": {
"total": 0.013954913999896235,
"count": 1,
"self": 0.013954913999896235
},
"TrainerController.start_learning": {
"total": 1317.6854257429998,
"count": 1,
"self": 0.8415892370196616,
"children": {
"TrainerController._reset_env": {
"total": 1.5151184090000243,
"count": 1,
"self": 1.5151184090000243
},
"TrainerController.advance": {
"total": 1315.2693330819798,
"count": 63438,
"self": 0.8165599330429814,
"children": {
"env_step": {
"total": 860.8822631099536,
"count": 63438,
"self": 761.0568589329214,
"children": {
"SubprocessEnvManager._take_step": {
"total": 99.28559918407996,
"count": 63438,
"self": 2.346536811041233,
"children": {
"TorchPolicy.evaluate": {
"total": 96.93906237303872,
"count": 62562,
"self": 96.93906237303872
}
}
},
"workers": {
"total": 0.5398049929522131,
"count": 63438,
"self": 0.0,
"children": {
"worker_root": {
"total": 1316.1398561559802,
"count": 63438,
"is_parallel": true,
"self": 619.9053040199549,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0011836020000828285,
"count": 1,
"is_parallel": true,
"self": 0.00031521199980488745,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.000868390000277941,
"count": 8,
"is_parallel": true,
"self": 0.000868390000277941
}
}
},
"UnityEnvironment.step": {
"total": 0.022986456999888105,
"count": 1,
"is_parallel": true,
"self": 0.0002501189999293274,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0003391269999610813,
"count": 1,
"is_parallel": true,
"self": 0.0003391269999610813
},
"communicator.exchange": {
"total": 0.02166052799998397,
"count": 1,
"is_parallel": true,
"self": 0.02166052799998397
},
"steps_from_proto": {
"total": 0.000736683000013727,
"count": 1,
"is_parallel": true,
"self": 0.0002028720000453177,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0005338109999684093,
"count": 8,
"is_parallel": true,
"self": 0.0005338109999684093
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 696.2345521360253,
"count": 63437,
"is_parallel": true,
"self": 23.863808253042407,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 15.566934434020595,
"count": 63437,
"is_parallel": true,
"self": 15.566934434020595
},
"communicator.exchange": {
"total": 594.1222001349615,
"count": 63437,
"is_parallel": true,
"self": 594.1222001349615
},
"steps_from_proto": {
"total": 62.68160931400075,
"count": 63437,
"is_parallel": true,
"self": 12.508354508128377,
"children": {
"_process_rank_one_or_two_observation": {
"total": 50.173254805872375,
"count": 507496,
"is_parallel": true,
"self": 50.173254805872375
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 453.57051003898323,
"count": 63438,
"self": 1.4379415429164055,
"children": {
"process_trajectory": {
"total": 74.6194224870676,
"count": 63438,
"self": 74.47729875106756,
"children": {
"RLTrainer._checkpoint": {
"total": 0.14212373600003048,
"count": 2,
"self": 0.14212373600003048
}
}
},
"_update_policy": {
"total": 377.51314600899923,
"count": 446,
"self": 199.6025343140136,
"children": {
"TorchPPOOptimizer.update": {
"total": 177.91061169498562,
"count": 22803,
"self": 177.91061169498562
}
}
}
}
}
}
},
"trainer_threads": {
"total": 5.510000846697949e-07,
"count": 1,
"self": 5.510000846697949e-07
},
"TrainerController._save_models": {
"total": 0.059384464000231674,
"count": 1,
"self": 0.0011513720000948524,
"children": {
"RLTrainer._checkpoint": {
"total": 0.05823309200013682,
"count": 1,
"self": 0.05823309200013682
}
}
}
}
}
}
}