{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.9099375009536743,
"min": 1.8661675453186035,
"max": 3.2956924438476562,
"count": 502
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 38993.28515625,
"min": 19750.966796875,
"max": 123137.859375,
"count": 502
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 53.380434782608695,
"min": 39.28225806451613,
"max": 999.0,
"count": 502
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19644.0,
"min": 11716.0,
"max": 29236.0,
"count": 502
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1519.1940760052948,
"min": 1193.9212761018673,
"max": 1533.515514826359,
"count": 474
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 279531.70998497424,
"min": 2388.225396237318,
"max": 366032.7195436335,
"count": 474
},
"SoccerTwos.Step.mean": {
"value": 5019975.0,
"min": 9634.0,
"max": 5019975.0,
"count": 502
},
"SoccerTwos.Step.sum": {
"value": 5019975.0,
"min": 9634.0,
"max": 5019975.0,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.014535492286086082,
"min": -0.08617693930864334,
"max": 0.16853713989257812,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -2.674530506134033,
"min": -19.663602828979492,
"max": 24.766143798828125,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.016605282202363014,
"min": -0.08135741204023361,
"max": 0.16739872097969055,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -3.0553717613220215,
"min": -20.176637649536133,
"max": 23.97747039794922,
"count": 502
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 502
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.023803258719651596,
"min": -0.7142857142857143,
"max": 0.5368666678667069,
"count": 502
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 4.3797996044158936,
"min": -61.33819967508316,
"max": 50.10979998111725,
"count": 502
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.023803258719651596,
"min": -0.7142857142857143,
"max": 0.5368666678667069,
"count": 502
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 4.3797996044158936,
"min": -61.33819967508316,
"max": 50.10979998111725,
"count": 502
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 502
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 502
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.015504540004379426,
"min": 0.010788624302949756,
"max": 0.02320948595685574,
"count": 240
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.015504540004379426,
"min": 0.010788624302949756,
"max": 0.02320948595685574,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10499235888322195,
"min": 1.9996884664881993e-06,
"max": 0.12079845145344734,
"count": 240
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10499235888322195,
"min": 1.9996884664881993e-06,
"max": 0.12079845145344734,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10644705643256505,
"min": 2.6293486750243272e-06,
"max": 0.1227307620147864,
"count": 240
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10644705643256505,
"min": 2.6293486750243272e-06,
"max": 0.1227307620147864,
"count": 240
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 240
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 240
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.20000000000000007,
"max": 0.20000000000000007,
"count": 240
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 240
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005000000000000001,
"max": 0.005000000000000001,
"count": 240
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1715866880",
"python_version": "3.10.12 | packaged by Anaconda, Inc. | (main, Jul 5 2023, 19:01:18) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\Stefanos\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.1.0.dev0",
"mlagents_envs_version": "1.1.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.3.0+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1715901395"
},
"total": 34515.322172500004,
"count": 1,
"self": 2.669409000016458,
"children": {
"run_training.setup": {
"total": 0.08809079999628011,
"count": 1,
"self": 0.08809079999628011
},
"TrainerController.start_learning": {
"total": 34512.56467269999,
"count": 1,
"self": 20.805050805684004,
"children": {
"TrainerController._reset_env": {
"total": 5.184445299993968,
"count": 26,
"self": 5.184445299993968
},
"TrainerController.advance": {
"total": 34486.22540859431,
"count": 342100,
"self": 23.311311696874327,
"children": {
"env_step": {
"total": 13792.796540091826,
"count": 342100,
"self": 9704.924439886265,
"children": {
"SubprocessEnvManager._take_step": {
"total": 4074.3288993037495,
"count": 342100,
"self": 140.05613029219967,
"children": {
"TorchPolicy.evaluate": {
"total": 3934.27276901155,
"count": 634252,
"self": 3934.27276901155
}
}
},
"workers": {
"total": 13.54320090181136,
"count": 342099,
"self": 0.0,
"children": {
"worker_root": {
"total": 34481.35648600158,
"count": 342099,
"is_parallel": true,
"self": 27277.232640405367,
"children": {
"steps_from_proto": {
"total": 0.12828029996308032,
"count": 52,
"is_parallel": true,
"self": 0.026462799905857537,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.10181750005722279,
"count": 208,
"is_parallel": true,
"self": 0.10181750005722279
}
}
},
"UnityEnvironment.step": {
"total": 7203.995565296253,
"count": 342099,
"is_parallel": true,
"self": 511.52673209626664,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 464.5084292975662,
"count": 342099,
"is_parallel": true,
"self": 464.5084292975662
},
"communicator.exchange": {
"total": 4619.977792499427,
"count": 342099,
"is_parallel": true,
"self": 4619.977792499427
},
"steps_from_proto": {
"total": 1607.982611402993,
"count": 684198,
"is_parallel": true,
"self": 334.3732184034452,
"children": {
"_process_rank_one_or_two_observation": {
"total": 1273.6093929995477,
"count": 2736792,
"is_parallel": true,
"self": 1273.6093929995477
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 20670.117556805613,
"count": 342099,
"self": 126.89241390427196,
"children": {
"process_trajectory": {
"total": 4399.967536801341,
"count": 342099,
"self": 4396.0436804013225,
"children": {
"RLTrainer._checkpoint": {
"total": 3.923856400018849,
"count": 10,
"self": 3.923856400018849
}
}
},
"_update_policy": {
"total": 16143.2576061,
"count": 241,
"self": 2189.836036298977,
"children": {
"TorchPOCAOptimizer.update": {
"total": 13953.421569801023,
"count": 7230,
"self": 13953.421569801023
}
}
}
}
}
}
},
"trainer_threads": {
"total": 2.2000021999701858e-06,
"count": 1,
"self": 2.2000021999701858e-06
},
"TrainerController._save_models": {
"total": 0.3497657999978401,
"count": 1,
"self": 0.0943046000029426,
"children": {
"RLTrainer._checkpoint": {
"total": 0.25546119999489747,
"count": 1,
"self": 0.25546119999489747
}
}
}
}
}
}
}