poca-SoccerTwos/run_logs/timers.json
{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 1.678034782409668,
"min": 1.5968852043151855,
"max": 3.233602523803711,
"count": 1760
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 33077.421875,
"min": 17696.080078125,
"max": 117138.6796875,
"count": 1760
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 91.75925925925925,
"min": 62.217948717948715,
"max": 999.0,
"count": 1760
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19820.0,
"min": 2580.0,
"max": 30880.0,
"count": 1760
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1836.438429277303,
"min": 1179.2443037592927,
"max": 1882.37162875756,
"count": 1640
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 198335.35036194872,
"min": 2361.124059228245,
"max": 287966.5509843448,
"count": 1640
},
"SoccerTwos.Step.mean": {
"value": 17769978.0,
"min": 179784.0,
"max": 17769978.0,
"count": 1760
},
"SoccerTwos.Step.sum": {
"value": 17769978.0,
"min": 179784.0,
"max": 17769978.0,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": -0.06006792560219765,
"min": -0.13460491597652435,
"max": 0.27712440490722656,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": -6.427268028259277,
"min": -16.538028717041016,
"max": 29.072328567504883,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.0632435604929924,
"min": -0.13357815146446228,
"max": 0.27661097049713135,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -6.76706075668335,
"min": -16.533781051635742,
"max": 29.783599853515625,
"count": 1760
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1760
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": -0.14345046738597833,
"min": -0.5008588227279046,
"max": 0.6801075255999001,
"count": 1760
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": -15.349200010299683,
"min": -38.858800172805786,
"max": 73.20900005102158,
"count": 1760
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": -0.14345046738597833,
"min": -0.5008588227279046,
"max": 0.6801075255999001,
"count": 1760
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": -15.349200010299683,
"min": -38.858800172805786,
"max": 73.20900005102158,
"count": 1760
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1760
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 1760
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.021148083967273124,
"min": 0.012226460961974226,
"max": 0.02482709277770482,
"count": 846
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.021148083967273124,
"min": 0.012226460961974226,
"max": 0.02482709277770482,
"count": 846
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.058767483830451966,
"min": 1.941374570435528e-07,
"max": 0.07228325821459293,
"count": 846
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.058767483830451966,
"min": 1.941374570435528e-07,
"max": 0.07228325821459293,
"count": 846
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.06154869928956032,
"min": 2.1960907531592966e-07,
"max": 0.07937255546450615,
"count": 846
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.06154869928956032,
"min": 2.1960907531592966e-07,
"max": 0.07937255546450615,
"count": 846
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.00030000000000000003,
"count": 846
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.00030000000000000003,
"count": 846
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.19999999999999996,
"min": 0.1999999999999999,
"max": 0.19999999999999996,
"count": 846
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.19999999999999996,
"min": 0.1999999999999999,
"max": 0.19999999999999996,
"count": 846
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005,
"min": 0.005,
"max": 0.005,
"count": 846
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005,
"min": 0.005,
"max": 0.005,
"count": 846
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1679283741",
"python_version": "3.9.16 (main, Mar 8 2023, 10:39:24) [MSC v.1916 64 bit (AMD64)]",
"command_line_arguments": "C:\\Users\\Harikrishnan\\anaconda3\\envs\\rl\\Scripts\\mlagents-learn ./config/poca/SoccerTwos.yaml --env=./training-envs-executables/SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.0.0+cu117",
"numpy_version": "1.21.2",
"end_time_seconds": "1679323876"
},
"total": 40135.4429809,
"count": 1,
"self": 2.7203401000006124,
"children": {
"run_training.setup": {
"total": 0.2814753000000003,
"count": 1,
"self": 0.2814753000000003
},
"TrainerController.start_learning": {
"total": 40132.4411655,
"count": 1,
"self": 24.566991302461247,
"children": {
"TrainerController._reset_env": {
"total": 27.113152699983004,
"count": 89,
"self": 27.113152699983004
},
"TrainerController.advance": {
"total": 40080.46692929755,
"count": 1171992,
"self": 24.480452696843713,
"children": {
"env_step": {
"total": 26651.92155879969,
"count": 1171992,
"self": 16748.34745229964,
"children": {
"SubprocessEnvManager._take_step": {
"total": 9884.468393799361,
"count": 1171992,
"self": 214.8578931963948,
"children": {
"TorchPolicy.evaluate": {
"total": 9669.610500602967,
"count": 2224116,
"self": 9669.610500602967
}
}
},
"workers": {
"total": 19.10571270068916,
"count": 1171992,
"self": 0.0,
"children": {
"worker_root": {
"total": 40063.883836604226,
"count": 1171992,
"is_parallel": true,
"self": 26597.65168740455,
"children": {
"steps_from_proto": {
"total": 0.17281169997980683,
"count": 178,
"is_parallel": true,
"self": 0.03303249997880897,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.13977920000099786,
"count": 712,
"is_parallel": true,
"self": 0.13977920000099786
}
}
},
"UnityEnvironment.step": {
"total": 13466.059337499697,
"count": 1171992,
"is_parallel": true,
"self": 924.8286633054868,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 685.6071762989832,
"count": 1171992,
"is_parallel": true,
"self": 685.6071762989832
},
"communicator.exchange": {
"total": 9345.10972820142,
"count": 1171992,
"is_parallel": true,
"self": 9345.10972820142
},
"steps_from_proto": {
"total": 2510.5137696938073,
"count": 2343984,
"is_parallel": true,
"self": 480.9454844945235,
"children": {
"_process_rank_one_or_two_observation": {
"total": 2029.5682851992838,
"count": 9375936,
"is_parallel": true,
"self": 2029.5682851992838
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 13404.064917801014,
"count": 1171992,
"self": 202.5390362991966,
"children": {
"process_trajectory": {
"total": 4284.44833050177,
"count": 1171992,
"self": 4274.55963380176,
"children": {
"RLTrainer._checkpoint": {
"total": 9.8886967000102,
"count": 35,
"self": 9.8886967000102
}
}
},
"_update_policy": {
"total": 8917.077551000046,
"count": 847,
"self": 4489.795613900346,
"children": {
"TorchPOCAOptimizer.update": {
"total": 4427.2819370996995,
"count": 42320,
"self": 4427.2819370996995
}
}
}
}
}
}
},
"trainer_threads": {
"total": 3.2000025385059416e-06,
"count": 1,
"self": 3.2000025385059416e-06
},
"TrainerController._save_models": {
"total": 0.2940890000027139,
"count": 1,
"self": 0.06316930000321008,
"children": {
"RLTrainer._checkpoint": {
"total": 0.2309196999995038,
"count": 1,
"self": 0.2309196999995038
}
}
}
}
}
}
}
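
A minimal sketch of how this dump can be inspected, assuming the JSON above is saved locally as "timers.json" (the key names are taken directly from the dump; nothing else is part of the original file):

# Load the ML-Agents timing/gauge dump and print a few summary values.
import json

with open("timers.json") as f:
    timers = json.load(f)

# Each gauge records value/min/max/count as observed during training.
gauges = timers["gauges"]
elo = gauges["SoccerTwos.Self-play.ELO.mean"]
print(f"Final mean ELO: {elo['value']:.1f} (min {elo['min']:.1f}, max {elo['max']:.1f})")

# The timer tree nests "children" blocks; walk it to see where wall-clock time went.
def walk(name, node, depth=0):
    total = node.get("total")
    if total is not None:
        print(f"{'  ' * depth}{name}: {total:.1f} s over {node.get('count', 1)} call(s)")
    for child_name, child in node.get("children", {}).items():
        walk(child_name, child, depth + 1)

walk(timers["name"], timers)

Walking from the root prints the full hierarchy (TrainerController.start_learning, env_step, trainer_advance, and so on) with the same totals and counts shown above.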