{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.36540231108665466,
"min": 0.35237109661102295,
"max": 1.4025382995605469,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 10921.14453125,
"min": 10599.322265625,
"max": 42547.40234375,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989991.0,
"min": 29952.0,
"max": 989991.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989991.0,
"min": 29952.0,
"max": 989991.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.4872588515281677,
"min": -0.16494891047477722,
"max": 0.5501914620399475,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 132.04714965820312,
"min": -39.092891693115234,
"max": 150.75245666503906,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.0103200264275074,
"min": -0.059089384973049164,
"max": 0.5623935461044312,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 2.796727180480957,
"min": -15.895044326782227,
"max": 133.2872772216797,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.06771961427321437,
"min": 0.06619681207204227,
"max": 0.07259268245117802,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9480745998250011,
"min": 0.4962719691095702,
"max": 1.0552266287416512,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.015742233409366146,
"min": 0.00015430757161889494,
"max": 0.015742233409366146,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.22039126773112605,
"min": 0.002160306002664529,
"max": 0.22039126773112605,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.259926151485717e-06,
"min": 7.259926151485717e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010163896612080004,
"min": 0.00010163896612080004,
"max": 0.0036326446891184992,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10241994285714284,
"min": 0.10241994285714284,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4338791999999998,
"min": 1.3886848,
"max": 2.6108814999999996,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025175229142857154,
"min": 0.00025175229142857154,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035245320800000018,
"min": 0.0035245320800000018,
"max": 0.12110706185,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012736918404698372,
"min": 0.012736918404698372,
"max": 0.4962150752544403,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.1783168613910675,
"min": 0.1783168613910675,
"max": 3.4735054969787598,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 393.2962962962963,
"min": 342.0,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31857.0,
"min": 15984.0,
"max": 32735.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 1.5325925772388775,
"min": -1.0000000521540642,
"max": 1.6350091748881614,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 124.13999875634909,
"min": -30.99120159447193,
"max": 142.24579821527004,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 1.5325925772388775,
"min": -1.0000000521540642,
"max": 1.6350091748881614,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 124.13999875634909,
"min": -30.99120159447193,
"max": 142.24579821527004,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.05251682504550209,
"min": 0.051719465093779504,
"max": 9.475311921909451,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.253862828685669,
"min": 4.0633804200042505,
"max": 151.60499075055122,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1680099905",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1680102146"
},
"total": 2240.934384835,
"count": 1,
"self": 0.47697263200007,
"children": {
"run_training.setup": {
"total": 0.12030924700002288,
"count": 1,
"self": 0.12030924700002288
},
"TrainerController.start_learning": {
"total": 2240.337102956,
"count": 1,
"self": 1.6590303649873022,
"children": {
"TrainerController._reset_env": {
"total": 6.1382265900001585,
"count": 1,
"self": 6.1382265900001585
},
"TrainerController.advance": {
"total": 2232.4418423110114,
"count": 63669,
"self": 1.7986669300603353,
"children": {
"env_step": {
"total": 1588.3292310240759,
"count": 63669,
"self": 1462.7798970060157,
"children": {
"SubprocessEnvManager._take_step": {
"total": 124.53532377808324,
"count": 63669,
"self": 5.374395466144051,
"children": {
"TorchPolicy.evaluate": {
"total": 119.16092831193919,
"count": 62556,
"self": 119.16092831193919
}
}
},
"workers": {
"total": 1.0140102399768693,
"count": 63669,
"self": 0.0,
"children": {
"worker_root": {
"total": 2234.6997151060555,
"count": 63669,
"is_parallel": true,
"self": 905.7528433140515,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0018630849999681232,
"count": 1,
"is_parallel": true,
"self": 0.0006120389998613973,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0012510460001067258,
"count": 8,
"is_parallel": true,
"self": 0.0012510460001067258
}
}
},
"UnityEnvironment.step": {
"total": 0.04632593799988172,
"count": 1,
"is_parallel": true,
"self": 0.0005560229997172428,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.0004958439999427355,
"count": 1,
"is_parallel": true,
"self": 0.0004958439999427355
},
"communicator.exchange": {
"total": 0.043483212999944953,
"count": 1,
"is_parallel": true,
"self": 0.043483212999944953
},
"steps_from_proto": {
"total": 0.0017908580002767849,
"count": 1,
"is_parallel": true,
"self": 0.0004002970003966766,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013905609998801083,
"count": 8,
"is_parallel": true,
"self": 0.0013905609998801083
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1328.946871792004,
"count": 63668,
"is_parallel": true,
"self": 33.197774520190706,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 25.705521032912202,
"count": 63668,
"is_parallel": true,
"self": 25.705521032912202
},
"communicator.exchange": {
"total": 1167.1486335849859,
"count": 63668,
"is_parallel": true,
"self": 1167.1486335849859
},
"steps_from_proto": {
"total": 102.89494265391522,
"count": 63668,
"is_parallel": true,
"self": 22.53683426210864,
"children": {
"_process_rank_one_or_two_observation": {
"total": 80.35810839180658,
"count": 509344,
"is_parallel": true,
"self": 80.35810839180658
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 642.3139443568753,
"count": 63669,
"self": 3.140440655949533,
"children": {
"process_trajectory": {
"total": 128.10260485293838,
"count": 63669,
"self": 127.88154415993813,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2210606930002541,
"count": 2,
"self": 0.2210606930002541
}
}
},
"_update_policy": {
"total": 511.07089884798734,
"count": 450,
"self": 323.42897816295545,
"children": {
"TorchPPOOptimizer.update": {
"total": 187.6419206850319,
"count": 22815,
"self": 187.6419206850319
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.082000380847603e-06,
"count": 1,
"self": 1.082000380847603e-06
},
"TrainerController._save_models": {
"total": 0.09800260800057004,
"count": 1,
"self": 0.0014748430003237445,
"children": {
"RLTrainer._checkpoint": {
"total": 0.09652776500024629,
"count": 1,
"self": 0.09652776500024629
}
}
}
}
}
}
}